QEUTVAZ4F4EJXRDMWDMYXF6XEDMX7YPVG4IIXEKPIR3K54E5W5OAC
ONES3V466GLO5CXKRF5ENK7VFOQPWM3YXLVRGWB56V5SH3W7XNBQC
DV4A2LR7Q5LAEGAQHLO34PZCHGJUHPAMRZFGT7GUFNKVQKPJNOYQC
OP6SVMOD2GTQ7VNJ4E5KYFG4MIYA7HBMXJTADALMZH4PY7OQRMZQC
WS4ZQM4RMIHZ6XZKSDQJGHN5SSSWFL4H236USOPUA33S6RC53RFAC
EAAYH6BQWDK52EC5RG3BEZQU3FJPN5RRRN4U5KDKDVPKXBVJMNDAC
R5AJGJPTY6CRGFJNBJEH4XOTOUZUI2FV4YKC3EZQQMYT2GMZJYKAC
FMN7X4J24EYPOJNBUWM4NKGWSJTRV2DHCIBMPV2AXLZVVAMNOBKQC
X3QVVQIS7B7L3XYZAWL3OOBUXOJ6RMOKQ45YMLLGAHYPEEKZ45ZAC
EHJFNMB2R4MYG6ZSHHEENRFCSPFVWKVHLD5DXAE5HXUGXP5ZVHKQC
UAQX27N4PI4LHEW6LSHJETIE5MV7JTEMPLTJFYUBMYVPC43H7VOAC
/// On-page header of a B-tree page, always stored little-endian.
#[derive(Debug)]
#[repr(C)]
struct Header {
    // Bit 0 is the dirty flag; the entry count lives in bits 4 and up.
    n: u16,
    // Offset of the lowest allocated data byte (allocation grows downwards).
    data: u16,
    crc: u32,
    // High bits: leftmost child page; low 12 bits used for size accounting.
    left_page: u64,
}
impl Header {
    /// Reset to an empty, dirty 4096-byte page.
    fn init(&mut self) {
        // Entry count 0, dirty bit set.
        self.n = (1u16).to_le();
        self.data = 4096_u16.to_le();
        self.crc = 0;
        self.left_page = 0;
    }
    /// Number of entries stored on the page.
    fn n(&self) -> u16 {
        let raw = u16::from_le(self.n);
        raw >> 4
    }
    /// Set the entry count, preserving the dirty flag.
    fn set_n(&mut self, n: u16) {
        let flags = u16::from_le(self.n) & 1;
        self.n = (flags | (n << 4)).to_le()
    }
    /// Whether the page has been modified since it was loaded.
    fn is_dirty(&self) -> bool {
        let raw = u16::from_le(self.n);
        (raw & 1) == 1
    }
    /// Decoded `left_page` word (child pointer plus low size bits).
    fn left_page(&self) -> u64 {
        u64::from_le(self.left_page)
    }
    /// Subtract `s` bytes from the `left_page` accounting word.
    fn decr(&mut self, s: usize) {
        let remaining = self.left_page() - s as u64;
        self.left_page = remaining.to_le();
    }
    /// A page whose `left_page` has no bits above the low 12 set
    /// (i.e. no leftmost child pointer) is a leaf.
    fn is_leaf(&self) -> bool {
        self.left_page() <= 0xfff
    }
}
/// Byte size of `Header` as laid out at the start of every page.
const HDR: usize = core::mem::size_of::<Header>();
}
/// Borrow the `Header` at the start of `page`'s buffer.
fn header<'a>(page: crate::Page<'a>) -> &'a Header {
    let ptr = page.data.as_ptr() as *const Header;
    // SAFETY: assumes every page buffer begins with an initialized Header.
    unsafe { &*ptr }
}
/// Mutably borrow the `Header` at the start of a writable page.
fn header_mut(page: &mut crate::MutPage) -> &mut Header {
    let ptr = page.0.data as *mut Header;
    // SAFETY: assumes the mutable page buffer begins with an initialized Header.
    unsafe { &mut *ptr }
}
/// Page-layout strategy: everything that differs between leaf and
/// internal pages (per-entry overhead, offset encoding, child pointers).
trait Alloc {
    /// Per-entry overhead in bytes beyond the entry data itself.
    fn extra_size<T, K: Representable<T>, V: Representable<T>>() -> usize;
    /// Whether `size` extra bytes fit without compacting the page.
    fn can_alloc<T, K: Representable<T>, V: Representable<T>>(hdr: &Header, size: usize) -> bool;
    /// Whether `size` extra bytes would fit after compaction.
    fn can_compact<T, K: Representable<T>, V: Representable<T>>(hdr: &Header, size: usize) -> bool;
    /// Reserve `size` bytes (aligned to `align`), returning their offset.
    fn alloc(hdr: &mut Header, size: usize, align: usize) -> u16;
    // n = number of items to remove
    fn truncate_left<T, K: Representable<T>, V: Representable<T>>(
        page: &mut MutPage,
        n: usize,
    ) -> Option<(*const K, *const V)>;
    /// Make room for one entry at position `n`; returns the byte offset
    /// at which the caller must write the entry.
    fn alloc_insert<T, K: Representable<T>, V: Representable<T>>(
        new: &mut MutPage,
        n: &mut isize,
        size: usize,
        r: u64,
    ) -> usize;
    /// Record the offset (and, for internal pages, the child `r`) of entry `n`.
    fn set_offset(new: &mut MutPage, n: isize, r: u64, off: u16);
    /// Set the right child of entry `n` (`n == -1` addresses the leftmost child).
    fn set_right_child(new: &mut MutPage, n: isize, r: u64);
    /// Encoded position of one entry, decodable to `(child, byte offset)`.
    type Offset: Into<(u64, u16)> + Copy + core::fmt::Debug;
    /// All entry positions on `page`, in order.
    fn offset_slice<'a, T, K: Representable<T>, V: Representable<T>>(
        page: crate::Page<'a>,
    ) -> Offsets<'a, Self::Offset>;
    /// Decode the key, value and right child stored at `off`.
    fn kv<'a, T, K: Representable<T>, V: Representable<T>>(
        page: crate::Page,
        off: (u64, u16),
    ) -> (&'a K, &'a V, u64);
}
/// Positions of a page's entries: either the explicit on-page offset
/// array, or a plain index range when all entries share a fixed size.
#[derive(Debug, Clone)]
enum Offsets<'a, A> {
    Slice(&'a [A]),
    Range(core::ops::Range<usize>),
}
impl<'a, A: Into<(u64, u16)> + Copy> Offsets<'a, A> {
    /// Split the positions at entry index `mid`; `mid` may equal the
    /// length, in which case the right half is empty.
    fn split_at(&self, mid: usize) -> (Self, Self) {
        match self {
            Offsets::Slice(s) if mid < s.len() => {
                debug!("split_at: {:?} {:?}", s.len(), mid);
                let (a, b) = s.split_at(mid);
                (Offsets::Slice(a), Offsets::Slice(b))
            }
            Offsets::Slice(s) => {
                debug_assert_eq!(mid, s.len());
                (Offsets::Slice(s), Offsets::Slice(&[][..]))
            }
            Offsets::Range(r) => (
                Offsets::Range(r.start..r.start + mid),
                Offsets::Range(r.start + mid..r.end),
            ),
        }
    }
    /// `(child, byte offset)` of the first position. For `Range` the
    /// offset is derived from the fixed entry size, placed after the
    /// alignment-rounded header.
    fn first<T, K: Representable<T>, V: Representable<T>>(&self) -> (u64, u16) {
        match self {
            Offsets::Slice(s) => s[0].into(),
            Offsets::Range(r) => {
                let size = fixed_size::<T, K, V>().unwrap();
                let al = K::ALIGN.max(V::ALIGN);
                let hdr_size = (HDR + al - 1) & !(al - 1);
                (0, (hdr_size + r.start * size) as u16)
            }
        }
    }
}
/// Marker type: allocation strategy for leaf pages (no child pointers).
struct Leaf {}
/// Marker type: allocation strategy for internal pages (8-byte child words).
struct Internal {}
impl Alloc for Leaf {
    /// Unsized entries need a 2-byte slot in the offset array; fixed-size
    /// entries are stored contiguously and need no extra bookkeeping.
    fn extra_size<T, K: Representable<T>, V: Representable<T>>() -> usize {
        if fixed_size::<T, K, V>().is_some() {
            0
        } else {
            2
        }
    }
    /// Can `size` more bytes be allocated without compacting the page?
    fn can_alloc<T, K: Representable<T>, V: Representable<T>>(hdr: &Header, size: usize) -> bool {
        if let Some(f) = fixed_size::<T, K, V>() {
            // Fixed-size entries sit contiguously after the
            // alignment-rounded header.
            let al = K::ALIGN.max(V::ALIGN);
            let header_size = (HDR + al - 1) & !(al - 1);
            debug!(
                "Leaf::can_alloc: {:?} {:?} {:?}",
                header_size, size, hdr.data
            );
            header_size + (hdr.n() as usize) * f + size < u16::from_le(hdr.data) as usize
        } else {
            // Unsized: the offset array (2 bytes per entry, plus 2 for the
            // new one) grows up while entry data grows down from `hdr.data`.
            HDR + (hdr.n() as usize) * 2 + 2 + size < u16::from_le(hdr.data) as usize
        }
    }
    /// Would the entry fit after compacting, counting only live bytes
    /// (tracked in the low 12 bits of `left_page`)?
    fn can_compact<T, K: Representable<T>, V: Representable<T>>(hdr: &Header, size: usize) -> bool {
        if fixed_size::<T, K, V>().is_some() {
            let al = K::ALIGN.max(V::ALIGN);
            let header_size = (HDR + al - 1) & !(al - 1);
            header_size + ((hdr.left_page() & 0xfff) as usize) + size
                < u16::from_le(hdr.data) as usize
        } else {
            HDR + ((hdr.left_page() & 0xfff) as usize) + 2 + size < 4096
        }
    }
    /// Remove the first `n` entries in place. For fixed-size entries,
    /// entry `n - 1` is rotated to the end of the live area and a pointer
    /// to it is returned so the split key remains addressable.
    fn truncate_left<T, K: Representable<T>, V: Representable<T>>(
        page: &mut MutPage,
        n: usize,
    ) -> Option<(*const K, *const V)> {
        debug!("truncate_left {:?} {:?}", page, n);
        if let Some(f) = fixed_size::<T, K, V>() {
            let al = K::ALIGN.max(V::ALIGN);
            let hdr_size = (HDR + al - 1) & !(al - 1);
            // debug!("{:?} {:?} {:?}", new, hdr.n(), *n);
            let hdr_n = header_mut(page).n();
            unsafe {
                // Save entry n-1, shift the survivors down, then write the
                // saved entry right after the survivors.
                let mut swap: core::mem::MaybeUninit<Tuple<K, V>> =
                    core::mem::MaybeUninit::uninit();
                core::ptr::copy(
                    page.0.data.add(hdr_size + (n - 1) * f),
                    swap.as_mut_ptr() as *mut u8,
                    f,
                );
                core::ptr::copy(
                    page.0.data.add(hdr_size + n * f),
                    page.0.data.add(hdr_size),
                    (hdr_n as usize - n) * f,
                );
                core::ptr::copy(
                    swap.as_ptr() as *mut u8,
                    page.0.data.add(hdr_size + (hdr_n as usize - n) * f),
                    f,
                );
            }
            debug!("{:?} - {:?}", hdr_n, n);
            let hdr = header_mut(page);
            hdr.set_n(hdr_n - n as u16);
            hdr.left_page = (hdr.left_page() - (n * f) as u64).to_le();
            unsafe {
                Some(read::<T, K, V>(
                    page.0.data.add(hdr_size + (hdr_n as usize - n) * f),
                ))
            }
        } else {
            let hdr_n = header_mut(page).n();
            unsafe {
                // Shift the surviving offsets to the front of the array.
                core::ptr::copy(
                    page.0.data.add(HDR + n * 2),
                    page.0.data.add(HDR),
                    (hdr_n as usize - n) * 2,
                );
            }
            // NOTE(review): this slice is read *after* the copy above has
            // already overwritten the first `n` offset slots, so it appears
            // to measure surviving entries rather than the deleted ones —
            // confirm against upstream before relying on this accounting.
            let deleted_offsets = unsafe {
                core::slice::from_raw_parts(page.0.data.add(HDR) as *const u16, n as usize)
            };
            let deleted_size: u64 = deleted_offsets
                .iter()
                .map(|&off| {
                    2 + unsafe { entry_size::<T, K, V>(page.0.data.add(off as usize)) } as u64
                })
                .sum();
            let hdr = header_mut(page);
            hdr.set_n(hdr_n - n as u16);
            hdr.left_page = (hdr.left_page() - deleted_size).to_le();
            None
        }
    }
    /// Reserve `size` bytes at the bottom of the free area, aligned down
    /// to `align`; returns the new data offset.
    fn alloc(hdr: &mut Header, size: usize, align: usize) -> u16 {
        let mut data = u16::from_le(hdr.data) - size as u16;
        data &= !((align - 1) as u16);
        hdr.data = data.to_le();
        hdr.set_n(hdr.n() + 1);
        hdr.left_page = (hdr.left_page() + size as u64).to_le();
        data
    }
    /// Open a slot for one entry at position `n` (shifting fixed-size
    /// entries, or the offset array, to the right) and return the byte
    /// offset where the entry must be written.
    fn alloc_insert<T, K: Representable<T>, V: Representable<T>>(
        new: &mut MutPage,
        n: &mut isize,
        size: usize,
        _: u64,
    ) -> usize {
        if let Some(f) = fixed_size::<T, K, V>() {
            let al = K::ALIGN.max(V::ALIGN);
            let hdr_size = (HDR + al - 1) & !(al - 1);
            // debug!("{:?} {:?} {:?}", new, hdr.n(), *n);
            let hdr_n = header_mut(new).n();
            unsafe {
                core::ptr::copy(
                    new.0.data.add(hdr_size + (*n as usize) * f),
                    new.0.data.add(hdr_size + (*n as usize) * f + f),
                    (hdr_n as usize - (*n as usize)) * f,
                );
            }
            let hdr = header_mut(new);
            hdr.set_n(hdr.n() + 1);
            hdr.left_page = (hdr.left_page() + f as u64).to_le();
            hdr_size + (*n as usize) * f
        } else {
            let (hdr_n, off_new) = {
                let hdr = header_mut(new);
                (
                    hdr.n(),
                    Self::alloc(&mut *hdr, 2 + size, K::ALIGN.max(V::ALIGN)),
                )
            };
            unsafe {
                core::ptr::copy(
                    new.0.data.add(HDR + (*n as usize) * 2),
                    new.0.data.add(HDR + (*n as usize) * 2 + 2),
                    (hdr_n as usize - (*n as usize)) * 2,
                );
            }
            Self::set_offset(new, *n, 0, off_new);
            off_new as usize
        }
    }
    /// Write the 2-byte little-endian offset of entry `n`.
    fn set_offset(new: &mut MutPage, n: isize, _: u64, off: u16) {
        unsafe {
            let ptr = new.0.data.offset(HDR as isize + n * 2) as *mut u16;
            *ptr = off.to_le();
        }
    }
    /// Leaves have no children: nothing to do.
    fn set_right_child(_: &mut MutPage, _: isize, _: u64) {}
    type Offset = LeafOffset;
    /// Entry positions: an index range for fixed-size entries, otherwise
    /// the on-page offset array.
    fn offset_slice<'a, T, K: Representable<T>, V: Representable<T>>(
        page: crate::Page<'a>,
    ) -> Offsets<'a, Self::Offset> {
        let hdr = header(page);
        if fixed_size::<T, K, V>().is_some() {
            Offsets::Range(0..(hdr.n() as usize))
        } else {
            unsafe {
                Offsets::Slice(core::slice::from_raw_parts(
                    page.data.as_ptr().add(HDR) as *const LeafOffset,
                    hdr.n() as usize,
                ))
            }
        }
    }
    /// Read the key/value at byte offset `off`; leaves report 0 as the
    /// right-child page.
    fn kv<'a, T, K: Representable<T>, V: Representable<T>>(
        page: crate::Page,
        (_, off): (u64, u16),
    ) -> (&'a K, &'a V, u64) {
        unsafe {
            let (k, v) = read::<T, K, V>(page.data.as_ptr().add(off as usize));
            (&*k, &*v, 0)
        }
    }
}
impl Alloc for Internal {
    /// Every internal entry carries an 8-byte (child | offset) word.
    fn extra_size<T, K: Representable<T>, V: Representable<T>>() -> usize {
        8
    }
    /// Offset array (8 bytes per entry, plus 8 for the new one) must not
    /// reach the data area at `hdr.data`.
    fn can_alloc<T, K: Representable<T>, V: Representable<T>>(hdr: &Header, size: usize) -> bool {
        debug!("Internal::can_alloc: {:?} {:?}", hdr.n(), hdr.data);
        (HDR as usize) + (hdr.n() as usize) * 8 + 8 + size < u16::from_le(hdr.data) as usize
    }
    /// Live byte count (low 12 bits of `left_page`) plus the new entry
    /// must fit in a 4096-byte page.
    fn can_compact<T, K: Representable<T>, V: Representable<T>>(hdr: &Header, size: usize) -> bool {
        (HDR as usize) + ((hdr.left_page() & 0xfff) as usize) + 8 + size < 4096
    }
    /// Drop the first `n` entries; entry `n - 1`'s child word becomes the
    /// page's leftmost child.
    fn truncate_left<T, K: Representable<T>, V: Representable<T>>(
        page: &mut MutPage,
        n: usize,
    ) -> Option<(*const K, *const V)> {
        // The following line copies the left child of the last entry
        // (hence the `- 8` and `- 1`)
        let hdr_n = header_mut(page).n();
        unsafe {
            core::ptr::copy(
                page.0.data.add(HDR + (n - 1) * 8),
                page.0.data.add(HDR - 8),
                (hdr_n as usize - n + 1) * 8,
            );
        }
        // Recompute the live byte count of the surviving entries.
        let size = if let Some(f) = fixed_size::<T, K, V>() {
            ((8 + f) * (hdr_n as usize - n)) as u64
        } else {
            let offsets = unsafe {
                core::slice::from_raw_parts(
                    page.0.data.add(HDR + n * 8) as *const u16,
                    hdr_n as usize - n as usize,
                )
            };
            offsets
                .iter()
                .map(|&off| {
                    8 + unsafe { entry_size::<T, K, V>(page.0.data.add(off as usize)) } as u64
                })
                .sum()
        };
        let hdr = header_mut(page);
        hdr.set_n(hdr_n - n as u16);
        debug!("truncate_left {:?} {:?}", hdr.left_page(), size);
        hdr.left_page = ((hdr.left_page() & !0xfff) | size).to_le();
        None
    }
    /// Reserve `size` bytes at the bottom of the free area, rounded down
    /// to a multiple of `align`; returns the new data offset.
    fn alloc(hdr: &mut Header, size: usize, align: usize) -> u16 {
        debug!("data = {:?}, size = {:?}", hdr.data, size);
        let mut data = u16::from_le(hdr.data) - size as u16;
        data -= data % (align as u16);
        hdr.data = data.to_le();
        hdr.set_n(hdr.n() + 1);
        hdr.left_page = (hdr.left_page() + size as u64).to_le();
        data
    }
    /// Allocate entry data and open slot `n` in the 8-byte offset array.
    fn alloc_insert<T, K: Representable<T>, V: Representable<T>>(
        new: &mut MutPage,
        n: &mut isize,
        size: usize,
        r: u64,
    ) -> usize {
        let (hdr_n, off_new) = {
            let hdr = header_mut(new);
            (
                hdr.n(),
                Self::alloc(&mut *hdr, size, K::ALIGN.max(V::ALIGN)),
            )
        };
        unsafe {
            core::ptr::copy(
                new.0.data.add(HDR + (*n as usize) * 8),
                new.0.data.add(HDR + (*n as usize) * 8 + 8),
                (hdr_n as usize - (*n as usize)) * 8,
            );
        }
        Self::set_offset(new, *n, r, off_new);
        off_new as usize
    }
    /// Store `child | offset` as a little-endian u64 at slot `n`.
    fn set_offset(new: &mut MutPage, n: isize, r: u64, off: u16) {
        unsafe {
            let ptr = new.0.data.offset(HDR as isize + n * 8) as *mut u64;
            *ptr = (r | off as u64).to_le();
        }
    }
    /// Replace the child part of slot `n`, keeping its 12-bit offset.
    fn set_right_child(page: &mut MutPage, n: isize, r: u64) {
        unsafe {
            let ptr = page.0.data.offset(HDR as isize + n * 8) as *mut u64;
            let off = u64::from_le(*ptr) & 0xfff;
            *ptr = (r | off as u64).to_le();
        }
    }
    type Offset = InternalOffset;
    /// The on-page array of 8-byte (child | offset) words.
    fn offset_slice<'a, T, K: Representable<T>, V: Representable<T>>(
        page: crate::Page<'a>,
    ) -> Offsets<'a, Self::Offset> {
        let hdr = header(page);
        unsafe {
            Offsets::Slice(core::slice::from_raw_parts(
                page.data.as_ptr().add(HDR) as *const InternalOffset,
                hdr.n() as usize,
            ))
        }
    }
    /// Read the key/value at byte offset `off`, returning `r` as the
    /// entry's right child.
    fn kv<'a, T, K: Representable<T>, V: Representable<T>>(
        page: crate::Page,
        (r, off): (u64, u16),
    ) -> (&'a K, &'a V, u64) {
        unsafe {
            let (k, v) = read::<T, K, V>(page.data.as_ptr().add(off as usize));
            (&*k, &*v, r)
        }
    }
/// Rotate the first element of the right page (`m.other`) up to replace
/// the middle key, and push the old middle key down onto the left page.
/// `freed` collects pages (offset | dirty bit) released along the way.
fn rebalance_left<
    'a,
    T: AllocPage + core::fmt::Debug,
    K: Representable<T> + core::fmt::Debug,
    V: Representable<T> + core::fmt::Debug,
    L: Alloc,
>(
    txn: &mut T,
    m: &mut Concat<'a, T, K, V, Page<K, V>>,
) -> Result<Op<'a, T, K, V>, T::Error> {
    assert!(m.mod_is_left);
    // First element of the right page. We'll rotate it to the left
    // page.
    let rc = <Page<K, V>>::first_cursor(m.other.as_page());
    let rl = <Page<K, V>>::left_child(m.other.as_page(), &rc);
    let (k, v, r) = unsafe { <Page<K, V>>::unchecked_current(m.other.as_page(), &rc) };
    let mut freed = [0, 0];
    let b = if header(m.modified.page.as_page()).is_dirty() {
        1
    } else {
        0
    };
    freed[0] = m.modified.page.offset | b;
    // Apply the pending modification (replace or delete) to the left page.
    let mut new_left = if let Some((k, v)) = m.modified.ins {
        if let Put::Ok(Ok { page, .. }) = <Page<K, V>>::replace(
            txn,
            m.modified.page,
            m.modified.mutable,
            &m.modified.c1,
            k,
            v,
            m.modified.ins2,
            m.modified.l,
            m.modified.r,
        )? {
            page
        } else {
            unreachable!()
        }
    } else {
        <Page<K, V>>::del(txn, m.modified.page, &m.modified.c1, m.modified.l)?
    };
    // Append the old middle key at the end of the left page, with the
    // right page's leftmost child as its right child.
    let mut n = header(new_left.0.as_page()).n() as isize;
    alloc::<T, K, V, L>(&mut new_left, m.mid.0, m.mid.1, 0, rl, &mut n);
    let right_hdr = header(m.other.as_page());
    let (new_right, k, v) = match fixed_size::<T, K, V>() {
        Some(f) if r == 0 && right_hdr.is_dirty() => {
            // Rewrite the deleted element at the end of the page, so that
            // the pointer is still valid.
            let data = m.other.data;
            let mut other = MutPage(m.other);
            let right_hdr = header_mut(&mut other);
            let al = K::ALIGN.max(V::ALIGN);
            let hdr_size = (HDR + al - 1) & !(al - 1);
            let n = right_hdr.n() as usize;
            right_hdr.decr(f);
            right_hdr.set_n((n - 1) as u16);
            let mut t: MaybeUninit<Tuple<K, V>> = MaybeUninit::uninit();
            unsafe {
                core::ptr::copy_nonoverlapping(
                    data.add(hdr_size) as *mut Tuple<K, V>,
                    t.as_mut_ptr(),
                    1,
                );
                core::ptr::copy(
                    data.add(hdr_size + f) as *const Tuple<K, V>,
                    data.add(hdr_size) as *mut Tuple<K, V>,
                    n - 1,
                );
                let last = data.add(hdr_size + f * (n - 1)) as *mut Tuple<K, V>;
                core::ptr::copy_nonoverlapping(t.as_ptr(), last, 1);
                debug!("tuple = {:?}", t.assume_init());
                (other, &(&*last).k, &(&*last).v)
            }
        }
        _ => unsafe {
            (
                <Page<K, V>>::del(txn, m.other, &rc, r)?,
                core::mem::transmute(k),
                core::mem::transmute(v),
            )
        },
    };
    if new_right.0.offset != m.other.offset {
        let hdr = &*header(m.other.as_page());
        let b = if hdr.is_dirty() { 1 } else { 0 };
        freed[1] = m.other.offset | b
    }
    Ok(Op::Rebalanced {
        l: new_left.0.offset,
        r: new_right.0.offset,
        k: unsafe { core::mem::transmute(k) },
        v: unsafe { core::mem::transmute(v) },
        freed,
    })
}
/// Symmetric to `rebalance_left`: the last element of the left page
/// (`m.other`) becomes the new middle key, and the old middle key is
/// inserted at the front of the right page.
fn rebalance_right<
    'a,
    T: AllocPage + core::fmt::Debug,
    K: Representable<T> + core::fmt::Debug,
    V: Representable<T> + core::fmt::Debug,
    L: Alloc,
>(
    txn: &mut T,
    m: &mut Concat<'a, T, K, V, Page<K, V>>,
) -> Result<Op<'a, T, K, V>, T::Error> {
    assert!(!m.mod_is_left);
    // Take the last element of the left page.
    let lc = <Page<K, V>>::last_cursor(m.other.as_page());
    let (k0, v0, r0) = unsafe { <Page<K, V>>::unchecked_current(m.other.as_page(), &lc) };
    // First element of the right page.
    let rc = <Page<K, V>>::first_cursor(m.modified.page.as_page());
    let rl = <Page<K, V>>::left_child(m.modified.page.as_page(), &rc);
    let mut freed = [0, 0];
    let b = if header(m.modified.page.as_page()).is_dirty() {
        1
    } else {
        0
    };
    freed[0] = m.modified.page.offset | b;
    // Apply the pending modification (replace or delete) to the right page.
    let mut new_right = if let Some((k, v)) = m.modified.ins {
        if let Put::Ok(Ok { page, .. }) = <Page<K, V>>::replace(
            txn,
            m.modified.page,
            m.modified.mutable,
            &m.modified.c1,
            k,
            v,
            m.modified.ins2,
            m.modified.l,
            m.modified.r,
        )? {
            page
        } else {
            unreachable!()
        }
    } else {
        <Page<K, V>>::del(txn, m.modified.page, &m.modified.c1, m.modified.l)?
    };
    // Insert the old middle key at the front of the right page.
    if let Put::Ok(Ok { page, .. }) =
        <Page<K, V>>::put(txn, new_right.0, true, &rc, m.mid.0, m.mid.1, None, r0, rl)?
    {
        new_right = page
    } else {
        unreachable!()
    };
    let new_left = <Page<K, V>>::del(txn, m.other, &lc, 0)?;
    if new_left.0.offset != m.other.offset {
        let hdr = &*header(m.other.as_page());
        let b = if hdr.is_dirty() { 1 } else { 0 };
        freed[1] = m.other.offset | b
    }
    Ok(Op::Rebalanced {
        l: new_left.0.offset,
        r: new_right.0.offset,
        k: unsafe { core::mem::transmute(k0) },
        v: unsafe { core::mem::transmute(v0) },
        freed,
    })
}
/// Position of a leaf entry: just a byte offset within the page (leaves
/// carry no child pointer, so the child component decodes to 0).
#[derive(Debug, Clone, Copy)]
#[repr(C)]
struct LeafOffset(u16);
// Idiom: implement `From` rather than `Into`; the standard blanket impl
// provides `Into<(u64, u16)>` (as required by `Alloc::Offset`) for free.
impl From<LeafOffset> for (u64, u16) {
    fn from(o: LeafOffset) -> (u64, u16) {
        (0, o.0)
    }
}
impl From<LeafOffset> for usize {
    fn from(o: LeafOffset) -> usize {
        o.0 as usize
    }
}
/// Position of an internal entry: a packed word whose low 12 bits are the
/// byte offset and whose high bits are the right-child page pointer.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
struct InternalOffset(u64);
// Idiom: implement `From` rather than `Into`; the standard blanket impl
// provides `Into<(u64, u16)>` (as required by `Alloc::Offset`) for free.
impl From<InternalOffset> for (u64, u16) {
    fn from(o: InternalOffset) -> (u64, u16) {
        (o.0 & !0xfff, (o.0 & 0xfff) as u16)
    }
}
impl From<InternalOffset> for usize {
    fn from(o: InternalOffset) -> usize {
        o.0 as usize
    }
}
/// Insert `(k0, v0)` (and optionally `(k1, v1)` right after it) at
/// position `u` in `page`, with left/right children `l` and `r`.
/// Returns the updated page, or a split when the entry does not fit.
fn put<
    'a,
    T: AllocPage + core::fmt::Debug,
    K: Representable<T> + core::fmt::Debug,
    V: Representable<T> + core::fmt::Debug,
    L: Alloc,
>(
    txn: &mut T,
    page: CowPage,
    mutable: bool,
    u: usize,
    k0: &'a K,
    v0: &'a V,
    k1v1: Option<(&'a K, &'a V)>,
    l: u64,
    r: u64,
) -> Result<Put<'a, K, V>, T::Error> {
    let size = alloc_size(k0, v0)
        + if let Some((k1, v1)) = k1v1 {
            alloc_size(k1, v1)
        } else {
            0
        };
    let hdr = header(page.as_page());
    let is_dirty = hdr.is_dirty();
    debug!("put {:?} {:?} {:?}", u, mutable, is_dirty);
    if mutable && is_dirty && L::can_alloc::<T, K, V>(header(page.as_page()), size) {
        debug!("can alloc");
        // The page is ours and has room: insert in place.
        let mut page = unsafe { core::mem::transmute(page) };
        let mut n = u as isize;
        if let Some((k1, v1)) = k1v1 {
            alloc::<_, _, _, L>(&mut page, k0, v0, 0, 0, &mut n);
            alloc::<_, _, _, L>(&mut page, k1, v1, l, r, &mut n);
        } else {
            alloc::<_, _, _, L>(&mut page, k0, v0, l, r, &mut n);
        }
        Ok(Put::Ok(Ok { page, freed: 0 }))
    } else if L::can_compact::<T, K, V>(hdr, size) {
        // Compact by copying live entries onto a fresh page, inserting
        // the new entry on the way.
        let mut new = txn.alloc_page()?;
        debug!("can compact: {:?}", new);
        <Page<K, V> as BTreeMutPage<T, K, V>>::init(&mut new);
        // Fix: decode `left_page` before masking. The raw field is stored
        // little-endian, so masking it directly is wrong on big-endian
        // targets (matches the corrected `pub(crate)` version of `put`).
        L::set_right_child(&mut new, -1, hdr.left_page() & !0xfff);
        let s = L::offset_slice::<T, K, V>(page.as_page());
        let (s0, s1) = s.split_at(u as usize);
        let mut n = 0;
        clone::<T, K, V, L>(page.as_page(), &mut new, s0, &mut n);
        if let Some((k1, v1)) = k1v1 {
            alloc::<_, _, _, L>(&mut new, k0, v0, 0, 0, &mut n);
            alloc::<_, _, _, L>(&mut new, k1, v1, l, r, &mut n);
        } else {
            alloc::<_, _, _, L>(&mut new, k0, v0, l, r, &mut n);
        }
        clone::<T, K, V, L>(page.as_page(), &mut new, s1, &mut n);
        let b0 = if is_dirty { 1 } else { 0 };
        return Ok(Put::Ok(Ok {
            page: new,
            freed: page.offset | b0,
        }));
    } else {
        debug!("split");
        if let Some(s) = fixed_size::<T, K, V>() {
            // The fixed-size split path cannot carry a second entry
            // (matches the corrected `pub(crate)` version of `put`).
            assert!(k1v1.is_none());
            return split::<_, _, _, L>(txn, page, mutable, s, u, k0, v0, l, r);
        } else {
            return split_unsized::<_, _, _, L>(txn, page.as_page(), u, k0, v0, k1v1, l, r);
        }
    }
}
/// Split a page of fixed-size entries at the middle entry `k = n / 2`,
/// inserting `(k0, v0)` at position `u` into the appropriate half.
fn split<
    'a,
    T: AllocPage + core::fmt::Debug,
    K: Representable<T> + core::fmt::Debug,
    V: Representable<T> + core::fmt::Debug,
    L: Alloc,
>(
    txn: &mut T,
    page: CowPage,
    mutable: bool,
    size: usize,
    u: usize,
    k0: &'a K,
    v0: &'a V,
    l: u64,
    r: u64,
) -> Result<Put<'a, K, V>, T::Error> {
    let mut left;
    let hdr = header(page.as_page());
    let page_is_dirty = if hdr.is_dirty() { 1 } else { 0 };
    let mut n = hdr.n();
    let k = n / 2;
    n += 1;
    let s = L::offset_slice::<T, K, V>(page.as_page());
    let (s0, s1) = s.split_at(k as usize);
    debug!("u = {:?}, k = {:?} {:?} {:?}", u, k, s0, s1);
    // The median that moves up: either the inserted element itself, or
    // the first element of the right half.
    let (mut split_key, mut split_value, mid_child, s1) = if u == k as usize {
        // The inserted element is exactly in the middle.
        (k0, v0, r, s1)
    } else {
        let (s1a, s1b) = s1.split_at(1);
        let (k, v, r) = L::kv(page.as_page(), s1a.first::<T, K, V>());
        debug!("k = {:?}, v = {:?} r = {:?}", k, v, r);
        debug!("k = {:?}", k as *const K as *const u8);
        (k, v, r, s1b)
    };
    let mut freed = 0;
    if u >= k as usize {
        if mutable && hdr.is_dirty() {
            debug!("mutable dirty {:?} >= {:?}", u, k);
            // (k0, v0) is to be inserted on the right-hand side of
            // the split, hence we don't have to clone the left-hand
            // side, we can just truncate it.
            left = unsafe { core::mem::transmute(page) };
            let hdr = header_mut(&mut left);
            hdr.set_n(k);
            hdr.decr((n - 1 - k) as usize * size);
        } else {
            left = txn.alloc_page()?;
            debug!("immutable {:?} >= {:?}", u, k);
            let mut n = 0;
            clone::<T, K, V, L>(page.as_page(), &mut left, s0, &mut n);
            freed = page.offset | page_is_dirty
        }
        // If we are here, u >= k, i.e. the insertion is in the right-hand
        // side of the split.
        let mut n = 0;
        let mut right = txn.alloc_page()?;
        <Page<K, V> as BTreeMutPage<T, K, V>>::init(&mut right);
        if u > k as usize {
            let kk = u as usize - k as usize - 1;
            let (s1a, s1b) = s1.split_at(kk);
            L::set_right_child(&mut right, -1, mid_child);
            clone::<T, K, V, L>(page.as_page(), &mut right, s1a, &mut n);
            alloc::<T, K, V, L>(&mut right, k0, v0, l, r, &mut n);
            clone::<T, K, V, L>(page.as_page(), &mut right, s1b, &mut n);
        } else {
            // Insertion in the middle:
            // - `l` becomes the right child of the last element on `left`.
            L::set_right_child(&mut left, k as isize - 1, l);
            // - `r` (i.e. `mid_child`) becomes the left child of `right`.
            L::set_right_child(&mut right, -1, mid_child);
            clone::<T, K, V, L>(page.as_page(), &mut right, s1, &mut n);
        }
        Ok(Put::Split {
            split_key,
            split_value,
            left,
            right,
            freed,
        })
    } else {
        // Insertion in the left half: build a fresh left page.
        left = txn.alloc_page()?;
        <Page<K, V> as BTreeMutPage<T, K, V>>::init(&mut left);
        debug!("{:?} < {:?}", u, k);
        let mut n = 0;
        let ll = header(page.as_page()).left_page() & !0xfff;
        L::set_right_child(&mut left, -1, ll);
        let (s0a, s0b) = s0.split_at(u as usize);
        clone::<T, K, V, L>(page.as_page(), &mut left, s0a, &mut n);
        alloc::<T, K, V, L>(&mut left, k0, v0, l, r, &mut n);
        clone::<T, K, V, L>(page.as_page(), &mut left, s0b, &mut n);
        let mut right: MutPage;
        let freed;
        if mutable && hdr.is_dirty() {
            // Truncate the original page in place to become the right half.
            right = unsafe { core::mem::transmute(page) };
            if let Some((k, v)) = L::truncate_left::<T, K, V>(&mut right, k as usize + 1) {
                unsafe {
                    split_key = &*k;
                    split_value = &*v;
                }
            }
            freed = 0;
        } else {
            right = txn.alloc_page()?;
            <Page<K, V> as BTreeMutPage<T, K, V>>::init(&mut right);
            let mut n = 0;
            L::set_right_child(&mut right, -1, mid_child);
            clone::<T, K, V, L>(page.as_page(), &mut right, s1, &mut n);
            freed = page.offset | page_is_dirty
        }
        Ok(Put::Split {
            split_key,
            split_value,
            left,
            right,
            freed,
        })
    }
}
/// Split a page of variable-sized entries roughly in half by byte size,
/// inserting the new entry (or pair) at position `u` along the way.
/// The entry that crosses the midpoint becomes the split key.
fn split_unsized<
    'a,
    T: AllocPage + core::fmt::Debug,
    K: Representable<T> + core::fmt::Debug,
    V: Representable<T> + core::fmt::Debug,
    L: Alloc,
>(
    txn: &mut T,
    page: crate::Page,
    u: usize,
    k0: &'a K,
    v0: &'a V,
    k1v1: Option<(&'a K, &'a V)>,
    l0: u64,
    r0: u64,
) -> Result<Put<'a, K, V>, T::Error> {
    let hdr = header(page);
    let n0 = hdr.n();
    let mut left = txn.alloc_page()?;
    <Page<K, V> as BTreeMutPage<T, K, V>>::init(&mut left);
    let mut right = txn.alloc_page()?;
    // Fix: initialize the right page. The original initialized `left`
    // twice here and left `right` uninitialized.
    <Page<K, V> as BTreeMutPage<T, K, V>>::init(&mut right);
    let left_child = header(page).left_page() & !0xfff;
    L::set_right_child(&mut left, -1, left_child);
    let mut split = core::ptr::null();
    let mut current_page = &mut left;
    let mut n = 0;
    // NOTE(review): the offsets are read as `LeafOffset` (u16) even when
    // `L` is `Internal` (8-byte slots) — confirm this path is leaf-only.
    let s = unsafe {
        core::slice::from_raw_parts(
            page.data.as_ptr().add(HDR) as *const LeafOffset,
            n0 as usize,
        )
    };
    let mut total = HDR;
    let mut is_left = true;
    for off in s.iter() {
        let (r, off): (u64, u16) = (*off).into();
        unsafe {
            let ptr = page.data.as_ptr().add(off as usize);
            let size = entry_size::<T, K, V>(ptr) + L::extra_size::<T, K, V>();
            total += size;
            if is_left && total >= PAGE_SIZE / 2 {
                // Crossed the midpoint: this entry becomes the split key,
                // and all subsequent entries go to the right page.
                is_left = false;
                split = ptr as *const Tuple<K, V>;
                L::set_right_child(&mut right, -1, r);
                current_page = &mut right;
            } else {
                let off_new = L::alloc(header_mut(current_page), size, K::ALIGN.max(V::ALIGN));
                core::ptr::copy_nonoverlapping(ptr, current_page.0.data.offset(off_new as isize), size);
                L::set_offset(current_page, n, r, off_new);
            }
        }
        n += 1;
        if n == u as isize {
            // Insert the new entry (or pair) once position `u` is reached.
            if let Some((k1, v1)) = k1v1 {
                alloc::<T, K, V, L>(current_page, k0, v0, 0, l0, &mut n);
                total += alloc_size(k0, v0) + L::extra_size::<T, K, V>();
                alloc::<T, K, V, L>(current_page, k1, v1, 0, r0, &mut n);
                total += alloc_size(k1, v1) + L::extra_size::<T, K, V>();
                n += 2;
            } else {
                alloc::<T, K, V, L>(current_page, k0, v0, l0, r0, &mut n);
                total += alloc_size(k0, v0) + L::extra_size::<T, K, V>();
                n += 1;
            }
        }
    }
    assert!(!split.is_null());
    let split = unsafe { &*split };
    Ok(Put::Split {
        split_key: &split.k,
        split_value: &split.v,
        left,
        right,
        freed: page.offset,
    })
}
use super::*;
use core::mem::MaybeUninit;
/// Rotate the first element of the right page (`m.other`) up to replace
/// the middle key, and push the old middle key down onto the left page.
/// `freed` collects pages (offset | dirty bit) released along the way.
pub(crate) fn rebalance_left<
    'a,
    T: AllocPage + core::fmt::Debug,
    K: Representable<T> + core::fmt::Debug,
    V: Representable<T> + core::fmt::Debug,
    L: Alloc,
>(
    txn: &mut T,
    m: &mut Concat<'a, T, K, V, Page<K, V>>,
) -> Result<Op<'a, T, K, V>, T::Error> {
    assert!(m.mod_is_left);
    // First element of the right page. We'll rotate it to the left
    // page.
    let rc = <Page<K, V>>::first_cursor(m.other.as_page());
    let rl = <Page<K, V>>::left_child(m.other.as_page(), &rc);
    let (k, v, r) = unsafe { <Page<K, V>>::unchecked_current(m.other.as_page(), &rc) };
    let mut freed = [0, 0];
    let b = if header(m.modified.page.as_page()).is_dirty() {
        1
    } else {
        0
    };
    freed[0] = m.modified.page.offset | b;
    // Apply the pending modification (replace or delete) to the left page.
    let mut new_left = if let Some((k, v)) = m.modified.ins {
        if let Put::Ok(Ok { page, .. }) = <Page<K, V>>::replace(
            txn,
            m.modified.page,
            m.modified.mutable,
            &m.modified.c1,
            k,
            v,
            m.modified.ins2,
            m.modified.l,
            m.modified.r,
        )? {
            page
        } else {
            unreachable!()
        }
    } else {
        <Page<K, V>>::del(txn, m.modified.page, &m.modified.c1, m.modified.l)?
    };
    // Append the old middle key at the end of the left page, with the
    // right page's leftmost child as its right child.
    let mut n = header(new_left.0.as_page()).n() as isize;
    alloc::<T, K, V, L>(&mut new_left, m.mid.0, m.mid.1, 0, rl, &mut n);
    let right_hdr = header(m.other.as_page());
    let (new_right, k, v) = match fixed_size::<T, K, V>() {
        Some(f) if r == 0 && right_hdr.is_dirty() => {
            // Rewrite the deleted element at the end of the page, so that
            // the pointer is still valid.
            let data = m.other.data;
            let mut other = MutPage(m.other);
            let right_hdr = header_mut(&mut other);
            let al = K::ALIGN.max(V::ALIGN);
            let hdr_size = (HDR + al - 1) & !(al - 1);
            let n = right_hdr.n() as usize;
            right_hdr.decr(f);
            right_hdr.set_n((n - 1) as u16);
            let mut t: MaybeUninit<Tuple<K, V>> = MaybeUninit::uninit();
            unsafe {
                core::ptr::copy_nonoverlapping(
                    data.add(hdr_size) as *mut Tuple<K, V>,
                    t.as_mut_ptr(),
                    1,
                );
                core::ptr::copy(
                    data.add(hdr_size + f) as *const Tuple<K, V>,
                    data.add(hdr_size) as *mut Tuple<K, V>,
                    n - 1,
                );
                let last = data.add(hdr_size + f * (n - 1)) as *mut Tuple<K, V>;
                core::ptr::copy_nonoverlapping(t.as_ptr(), last, 1);
                debug!("tuple = {:?}", t.assume_init());
                (other, &(&*last).k, &(&*last).v)
            }
        }
        _ => unsafe {
            (
                <Page<K, V>>::del(txn, m.other, &rc, r)?,
                core::mem::transmute(k),
                core::mem::transmute(v),
            )
        },
    };
    if new_right.0.offset != m.other.offset {
        let hdr = &*header(m.other.as_page());
        let b = if hdr.is_dirty() { 1 } else { 0 };
        freed[1] = m.other.offset | b
    }
    Ok(Op::Rebalanced {
        l: new_left.0.offset,
        r: new_right.0.offset,
        k: unsafe { core::mem::transmute(k) },
        v: unsafe { core::mem::transmute(v) },
        freed,
    })
}
/// Symmetric to `rebalance_left`: the last element of the left page
/// (`m.other`) becomes the new middle key, and the old middle key is
/// inserted at the front of the right page.
pub(crate) fn rebalance_right<
    'a,
    T: AllocPage + core::fmt::Debug,
    K: Representable<T> + core::fmt::Debug,
    V: Representable<T> + core::fmt::Debug,
    L: Alloc,
>(
    txn: &mut T,
    m: &mut Concat<'a, T, K, V, Page<K, V>>,
) -> Result<Op<'a, T, K, V>, T::Error> {
    assert!(!m.mod_is_left);
    // Take the last element of the left page.
    let lc = <Page<K, V>>::last_cursor(m.other.as_page());
    let (k0, v0, r0) = unsafe { <Page<K, V>>::unchecked_current(m.other.as_page(), &lc) };
    // First element of the right page.
    let rc = <Page<K, V>>::first_cursor(m.modified.page.as_page());
    let rl = <Page<K, V>>::left_child(m.modified.page.as_page(), &rc);
    let mut freed = [0, 0];
    let b = if header(m.modified.page.as_page()).is_dirty() {
        1
    } else {
        0
    };
    freed[0] = m.modified.page.offset | b;
    // Apply the pending modification (replace or delete) to the right page.
    let mut new_right = if let Some((k, v)) = m.modified.ins {
        if let Put::Ok(Ok { page, .. }) = <Page<K, V>>::replace(
            txn,
            m.modified.page,
            m.modified.mutable,
            &m.modified.c1,
            k,
            v,
            m.modified.ins2,
            m.modified.l,
            m.modified.r,
        )? {
            page
        } else {
            unreachable!()
        }
    } else {
        <Page<K, V>>::del(txn, m.modified.page, &m.modified.c1, m.modified.l)?
    };
    // Insert the old middle key at the front of the right page.
    if let Put::Ok(Ok { page, .. }) =
        <Page<K, V>>::put(txn, new_right.0, true, &rc, m.mid.0, m.mid.1, None, r0, rl)?
    {
        new_right = page
    } else {
        unreachable!()
    };
    let new_left = <Page<K, V>>::del(txn, m.other, &lc, 0)?;
    if new_left.0.offset != m.other.offset {
        let hdr = &*header(m.other.as_page());
        let b = if hdr.is_dirty() { 1 } else { 0 };
        freed[1] = m.other.offset | b
    }
    Ok(Op::Rebalanced {
        l: new_left.0.offset,
        r: new_right.0.offset,
        k: unsafe { core::mem::transmute(k0) },
        v: unsafe { core::mem::transmute(v0) },
        freed,
    })
}
use super::*;
/// Insert `(k0, v0)` (and optionally `(k1, v1)` right after it) at
/// position `u` in `page`, with left/right children `l` and `r`.
/// Returns the updated page, or a split when the entry does not fit.
pub(crate) fn put<
    'a,
    T: AllocPage + core::fmt::Debug,
    K: Representable<T> + core::fmt::Debug,
    V: Representable<T> + core::fmt::Debug,
    L: Alloc,
>(
    txn: &mut T,
    page: CowPage,
    mutable: bool,
    u: usize,
    k0: &'a K,
    v0: &'a V,
    k1v1: Option<(&'a K, &'a V)>,
    l: u64,
    r: u64,
) -> Result<Put<'a, K, V>, T::Error> {
    let size = alloc_size(k0, v0)
        + if let Some((k1, v1)) = k1v1 {
            alloc_size(k1, v1)
        } else {
            0
        };
    let hdr = header(page.as_page());
    let is_dirty = hdr.is_dirty();
    debug!("put {:?} {:?} {:?}", u, mutable, is_dirty);
    if mutable && is_dirty && L::can_alloc::<T, K, V>(header(page.as_page()), size) {
        debug!("can alloc");
        // The page is ours and has room: insert in place.
        let mut page = unsafe { core::mem::transmute(page) };
        let mut n = u as isize;
        if let Some((k1, v1)) = k1v1 {
            alloc::<_, _, _, L>(&mut page, k0, v0, 0, 0, &mut n);
            alloc::<_, _, _, L>(&mut page, k1, v1, l, r, &mut n);
        } else {
            alloc::<_, _, _, L>(&mut page, k0, v0, l, r, &mut n);
        }
        Ok(Put::Ok(Ok { page, freed: 0 }))
    } else if L::can_compact::<T, K, V>(hdr, size) {
        // Compact by copying live entries onto a fresh page, inserting
        // the new entry on the way.
        let mut new = txn.alloc_page()?;
        debug!("can compact: {:?}", new);
        <Page<K, V> as BTreeMutPage<T, K, V>>::init(&mut new);
        L::set_right_child(&mut new, -1, hdr.left_page() & !0xfff);
        let s = L::offset_slice::<T, K, V>(page.as_page());
        let (s0, s1) = s.split_at(u as usize);
        let mut n = 0;
        clone::<T, K, V, L>(page.as_page(), &mut new, s0, &mut n);
        if let Some((k1, v1)) = k1v1 {
            alloc::<_, _, _, L>(&mut new, k0, v0, 0, 0, &mut n);
            alloc::<_, _, _, L>(&mut new, k1, v1, l, r, &mut n);
        } else {
            alloc::<_, _, _, L>(&mut new, k0, v0, l, r, &mut n);
        }
        clone::<T, K, V, L>(page.as_page(), &mut new, s1, &mut n);
        let b0 = if is_dirty { 1 } else { 0 };
        return Ok(Put::Ok(Ok {
            page: new,
            freed: page.offset | b0,
        }));
    } else {
        debug!("split");
        if let Some(s) = fixed_size::<T, K, V>() {
            // The fixed-size split path cannot carry a second entry.
            assert!(k1v1.is_none());
            return split::<_, _, _, L>(txn, page, mutable, s, u, k0, v0, l, r);
        } else {
            return split_unsized::<_, _, _, L>(txn, page.as_page(), u, k0, v0, k1v1, l, r);
        }
    }
}
/// Split a page of fixed-size entries at the middle entry `k = n / 2`,
/// inserting `(k0, v0)` at position `u` into the appropriate half.
fn split<
    'a,
    T: AllocPage + core::fmt::Debug,
    K: Representable<T> + core::fmt::Debug,
    V: Representable<T> + core::fmt::Debug,
    L: Alloc,
>(
    txn: &mut T,
    page: CowPage,
    mutable: bool,
    size: usize,
    u: usize,
    k0: &'a K,
    v0: &'a V,
    l: u64,
    r: u64,
) -> Result<Put<'a, K, V>, T::Error> {
    let mut left;
    let hdr = header(page.as_page());
    let page_is_dirty = if hdr.is_dirty() { 1 } else { 0 };
    let mut n = hdr.n();
    let k = n / 2;
    n += 1;
    let s = L::offset_slice::<T, K, V>(page.as_page());
    let (s0, s1) = s.split_at(k as usize);
    debug!("u = {:?}, k = {:?} {:?} {:?}", u, k, s0, s1);
    // The median that moves up: either the inserted element itself, or
    // the first element of the right half.
    let (mut split_key, mut split_value, mid_child, s1) = if u == k as usize {
        // The inserted element is exactly in the middle.
        (k0, v0, r, s1)
    } else {
        let (s1a, s1b) = s1.split_at(1);
        let (k, v, r) = L::kv(page.as_page(), s1a.first::<T, K, V>());
        debug!("k = {:?}, v = {:?} r = {:?}", k, v, r);
        debug!("k = {:?}", k as *const K as *const u8);
        (k, v, r, s1b)
    };
    let mut freed = 0;
    if u >= k as usize {
        if mutable && hdr.is_dirty() {
            debug!("mutable dirty {:?} >= {:?}", u, k);
            // (k0, v0) is to be inserted on the right-hand side of
            // the split, hence we don't have to clone the left-hand
            // side, we can just truncate it.
            left = unsafe { core::mem::transmute(page) };
            let hdr = header_mut(&mut left);
            hdr.set_n(k);
            hdr.decr((n - 1 - k) as usize * size);
        } else {
            left = txn.alloc_page()?;
            debug!("immutable {:?} >= {:?}", u, k);
            let mut n = 0;
            clone::<T, K, V, L>(page.as_page(), &mut left, s0, &mut n);
            freed = page.offset | page_is_dirty
        }
        // If we are here, u >= k, i.e. the insertion is in the right-hand
        // side of the split.
        let mut n = 0;
        let mut right = txn.alloc_page()?;
        <Page<K, V> as BTreeMutPage<T, K, V>>::init(&mut right);
        if u > k as usize {
            let kk = u as usize - k as usize - 1;
            let (s1a, s1b) = s1.split_at(kk);
            L::set_right_child(&mut right, -1, mid_child);
            clone::<T, K, V, L>(page.as_page(), &mut right, s1a, &mut n);
            alloc::<T, K, V, L>(&mut right, k0, v0, l, r, &mut n);
            clone::<T, K, V, L>(page.as_page(), &mut right, s1b, &mut n);
        } else {
            // Insertion in the middle:
            // - `l` becomes the right child of the last element on `left`.
            L::set_right_child(&mut left, k as isize - 1, l);
            // - `r` (i.e. `mid_child`) becomes the left child of `right`.
            L::set_right_child(&mut right, -1, mid_child);
            clone::<T, K, V, L>(page.as_page(), &mut right, s1, &mut n);
        }
        Ok(Put::Split {
            split_key,
            split_value,
            left,
            right,
            freed,
        })
    } else {
        // Insertion in the left half: build a fresh left page.
        left = txn.alloc_page()?;
        <Page<K, V> as BTreeMutPage<T, K, V>>::init(&mut left);
        debug!("{:?} < {:?}", u, k);
        let mut n = 0;
        let ll = header(page.as_page()).left_page() & !0xfff;
        L::set_right_child(&mut left, -1, ll);
        let (s0a, s0b) = s0.split_at(u as usize);
        clone::<T, K, V, L>(page.as_page(), &mut left, s0a, &mut n);
        alloc::<T, K, V, L>(&mut left, k0, v0, l, r, &mut n);
        clone::<T, K, V, L>(page.as_page(), &mut left, s0b, &mut n);
        let mut right: MutPage;
        let freed;
        if mutable && hdr.is_dirty() {
            // Truncate the original page in place to become the right half.
            right = unsafe { core::mem::transmute(page) };
            if let Some((k, v)) = L::truncate_left::<T, K, V>(&mut right, k as usize + 1) {
                unsafe {
                    split_key = &*k;
                    split_value = &*v;
                }
            }
            freed = 0;
        } else {
            right = txn.alloc_page()?;
            <Page<K, V> as BTreeMutPage<T, K, V>>::init(&mut right);
            let mut n = 0;
            L::set_right_child(&mut right, -1, mid_child);
            clone::<T, K, V, L>(page.as_page(), &mut right, s1, &mut n);
            freed = page.offset | page_is_dirty
        }
        Ok(Put::Split {
            split_key,
            split_value,
            left,
            right,
            freed,
        })
    }
}
/// Split an overflowing page whose entries are not of a fixed size,
/// distributing the existing entries (plus the one or two freshly
/// inserted ones) between two new pages, `left` and `right`.
///
/// `u` is the position at which `(k0, v0)` is inserted, `l0`/`r0` its
/// children. If `k1v1` is `Some`, a second entry is inserted right
/// after, and `l0`/`r0` become the right children of the two inserted
/// entries. The median entry is not copied: it becomes the returned
/// split key/value (still pointing into the old page, which is
/// returned in `freed`).
fn split_unsized<
    'a,
    T: AllocPage + core::fmt::Debug,
    K: Representable<T> + core::fmt::Debug,
    V: Representable<T> + core::fmt::Debug,
    L: Alloc,
>(
    txn: &mut T,
    page: crate::Page,
    u: usize,
    k0: &'a K,
    v0: &'a V,
    k1v1: Option<(&'a K, &'a V)>,
    l0: u64,
    r0: u64,
) -> Result<Put<'a, K, V>, T::Error> {
    let hdr = header(page);
    let n0 = hdr.n();
    let mut left = txn.alloc_page()?;
    <Page<K, V> as BTreeMutPage<T, K, V>>::init(&mut left);
    let mut right = txn.alloc_page()?;
    // BUG FIX: `right` must be initialised; this used to re-initialise
    // `left` a second time, leaving `right`'s header uninitialised.
    <Page<K, V> as BTreeMutPage<T, K, V>>::init(&mut right);
    // The leftmost child of the old page stays the leftmost child of
    // `left` (low 12 bits of `left_page` are the occupied-bytes field).
    let left_child = header(page).left_page() & !0xfff;
    L::set_right_child(&mut left, -1, left_child);
    // Pointer to the median entry, which becomes the split key/value.
    let mut split = core::ptr::null();
    let mut current_page = &mut left;
    let mut n = 0;
    // NOTE(review): the offsets are read as `LeafOffset` (u16) even
    // though this function is generic in `L` — confirm this is only
    // ever called on leaf pages, or use `L::Offset` here instead.
    let s = unsafe {
        core::slice::from_raw_parts(
            page.data.as_ptr().add(HDR) as *const LeafOffset,
            n0 as usize,
        )
    };
    let mut total = HDR;
    let mut is_left = true;
    for off in s.iter() {
        let (r, off): (u64, u16) = (*off).into();
        unsafe {
            let ptr = page.data.as_ptr().add(off as usize);
            let size = entry_size::<T, K, V>(ptr) + L::extra_size::<T, K, V>();
            total += size;
            if is_left && total >= PAGE_SIZE / 2 {
                // Median reached: do not copy this entry; its right
                // child becomes the leftmost child of `right`, and all
                // subsequent entries go to `right`.
                is_left = false;
                split = ptr as *const Tuple<K, V>;
                L::set_right_child(&mut right, -1, r);
                current_page = &mut right;
            } else {
                let off_new = L::alloc(header_mut(current_page), size, K::ALIGN.max(V::ALIGN));
                core::ptr::copy_nonoverlapping(
                    ptr,
                    current_page.0.data.offset(off_new as isize),
                    size,
                );
                // NOTE(review): `n` keeps counting across both pages —
                // confirm the offset slot index on `right` should not
                // restart at 0 (compare with the sized-split code).
                L::set_offset(current_page, n, r, off_new);
            }
        }
        n += 1;
        if n == u as isize {
            // Insert the new entry (or pair of entries) at position `u`.
            if let Some((k1, v1)) = k1v1 {
                alloc::<T, K, V, L>(current_page, k0, v0, 0, l0, &mut n);
                total += alloc_size(k0, v0) + L::extra_size::<T, K, V>();
                alloc::<T, K, V, L>(current_page, k1, v1, 0, r0, &mut n);
                total += alloc_size(k1, v1) + L::extra_size::<T, K, V>();
                // NOTE(review): `alloc` already takes `&mut n` —
                // confirm these extra increments do not double-count.
                n += 2;
            } else {
                alloc::<T, K, V, L>(current_page, k0, v0, l0, r0, &mut n);
                total += alloc_size(k0, v0) + L::extra_size::<T, K, V>();
                n += 1;
            }
        }
    }
    // An overflowing page always has a median entry.
    assert!(!split.is_null());
    let split = unsafe { &*split };
    Ok(Put::Split {
        split_key: &split.k,
        split_value: &split.v,
        left,
        right,
        freed: page.offset,
    })
}
/// On-page header, stored little-endian at offset 0 of every B-tree
/// page.
#[derive(Debug)]
#[repr(C)]
pub struct Header {
    // Number of entries shifted left by 4; bit 0 is the "dirty" flag
    // (page allocated by the current transaction, mutable in place).
    n: u16,
    // Offset of the start of the data area; entries are allocated
    // downwards from the end of the page (see `Alloc::alloc`).
    data: u16,
    // Checksum field; always 0 in this code.
    crc: u32,
    // Low 12 bits: number of occupied bytes. Upper bits: offset of the
    // leftmost child page for internal nodes, 0 for leaves (hence
    // `is_leaf` below).
    left_page: u64,
}
impl Header {
    /// Initialize a freshly allocated page: one dirty flag set, no
    /// entries, data area starting at the end of the page.
    pub fn init(&mut self) {
        self.n = (1u16).to_le(); // dirty page
        self.data = 4096_u16.to_le();
        self.crc = 0;
        self.left_page = 0;
    }
    /// Number of entries on the page.
    pub fn n(&self) -> u16 {
        u16::from_le(self.n) >> 4
    }
    /// Set the number of entries, preserving the dirty flag.
    pub fn set_n(&mut self, n: u16) {
        let dirty = u16::from_le(self.n) & 1;
        self.n = ((n << 4) | dirty).to_le()
    }
    /// Was this page allocated by the current transaction?
    pub fn is_dirty(&self) -> bool {
        u16::from_le(self.n) & 1 != 0
    }
    pub fn left_page(&self) -> u64 {
        u64::from_le(self.left_page)
    }
    pub fn set_left_page(&mut self, l: u64) {
        self.left_page = l.to_le()
    }
    pub fn data(&self) -> u16 {
        u16::from_le(self.data)
    }
    pub fn set_data(&mut self, d: u16) {
        self.data = d.to_le()
    }
    /// Decrease the occupied-bytes counter (low bits of `left_page`).
    pub fn decr(&mut self, s: usize) {
        self.left_page = (self.left_page() - s as u64).to_le();
    }
    /// Set the occupied-bytes counter, preserving the child pointer.
    pub fn set_occupied(&mut self, size: u64) {
        self.left_page = ((self.left_page() & !0xfff) | size).to_le()
    }
    /// Increase the occupied-bytes counter.
    pub fn incr(&mut self, s: usize) {
        self.left_page = (self.left_page() + s as u64).to_le();
    }
    /// Leaves have no child pointer, so `left_page` only holds the
    /// occupied bytes (at most 0xfff).
    pub fn is_leaf(&self) -> bool {
        u64::from_le(self.left_page) <= 0xfff
    }
    /// Clear the dirty flag.
    pub fn clean(&mut self) {
        // BUG FIX: the mask must clear the low 4 flag bits (dirty flag
        // in bit 0) and keep the entry count in bits 4..16. The
        // previous mask `0xfff` kept the dirty bit set and corrupted
        // counts >= 256 by clearing the top bits of `n`.
        self.n = (u16::from_le(self.n) & 0xfff0).to_le()
    }
}
/// Size in bytes of the on-page [`Header`] (the offset array starts here).
pub const HDR: usize = core::mem::size_of::<Header>();
/// View the header of a read-only page.
pub fn header<'a>(page: crate::Page<'a>) -> &'a Header {
    // SAFETY: every B-tree page starts with a `#[repr(C)]` `Header`,
    // and pages are at least `HDR` bytes long.
    unsafe { &*(page.data.as_ptr() as *const Header) }
}
/// View the header of a mutable page.
pub fn header_mut(page: &mut crate::MutPage) -> &mut Header {
    // SAFETY: same layout argument as `header`; the `&mut` borrow of
    // the page guarantees exclusive access.
    unsafe { &mut *(page.0.data as *mut Header) }
}
use super::*;
/// Operations that differ between leaf and internal pages: per-entry
/// bookkeeping, allocation within a page, and offset decoding.
pub(crate) trait Alloc {
    /// Per-entry overhead in bytes (offset slot and/or child-pointer
    /// word), in addition to the entry itself.
    fn extra_size<T, K: Representable<T>, V: Representable<T>>() -> usize;
    /// Can `size` more bytes be allocated without compacting the page?
    fn can_alloc<T, K: Representable<T>, V: Representable<T>>(hdr: &Header, size: usize) -> bool;
    /// Would `size` more bytes fit if the page were compacted first
    /// (i.e. counting only the occupied bytes)?
    fn can_compact<T, K: Representable<T>, V: Representable<T>>(hdr: &Header, size: usize) -> bool;
    /// Allocate `size` bytes from the page's data area (which grows
    /// downwards) and return the offset of the allocation.
    fn alloc(hdr: &mut Header, size: usize, align: usize) -> u16;
    // n = number of items to remove
    /// Remove the first `n` entries of the page. May return pointers
    /// to a saved copy of the last removed entry (used as a split key
    /// by callers — see the leaf fixed-size implementation).
    fn truncate_left<T, K: Representable<T>, V: Representable<T>>(
        page: &mut MutPage,
        n: usize,
    ) -> Option<(*const K, *const V)>;
    /// Make room for a new entry at position `n` (shifting the offset
    /// array) and return the byte offset of the new entry.
    fn alloc_insert<T, K: Representable<T>, V: Representable<T>>(
        new: &mut MutPage,
        n: &mut isize,
        size: usize,
        r: u64,
    ) -> usize;
    /// Store offset `off` (and, on internal pages, right child `r`) in
    /// slot `n` of the offset array.
    fn set_offset(new: &mut MutPage, n: isize, r: u64, off: u16);
    /// Set the right child of entry `n`; `n == -1` addresses the
    /// page's leftmost child. No-op on leaves.
    fn set_right_child(new: &mut MutPage, n: isize, r: u64);
    /// On-page representation of one offset-array slot: `u16` on
    /// leaves, `u64` (child pointer | offset) on internal pages.
    type Offset: Into<(u64, u16)> + Copy + core::fmt::Debug;
    /// The page's entries, as a slice of offsets or (for fixed-size
    /// leaf entries) a range of positions.
    fn offset_slice<'a, T, K: Representable<T>, V: Representable<T>>(
        page: crate::Page<'a>,
    ) -> Offsets<'a, Self::Offset>;
    /// Decode a `(child, offset)` pair into key and value references
    /// plus the right-child page.
    fn kv<'a, T, K: Representable<T>, V: Representable<T>>(
        page: crate::Page,
        off: (u64, u16),
    ) -> (&'a K, &'a V, u64);
}
/// The entries of a page: either an explicit slice of on-page offsets,
/// or — when the key/value encoding has a fixed size — just a range of
/// entry positions.
#[derive(Debug, Clone)]
pub enum Offsets<'a, A> {
    /// Explicit offsets, read from the page's offset array.
    Slice(&'a [A]),
    /// Positions `start..end` of fixed-size entries.
    Range(core::ops::Range<usize>),
}
impl<'a, A: Into<(u64, u16)> + Copy> Offsets<'a, A> {
    /// Split into the entries `[..mid]` and `[mid..]`.
    pub fn split_at(&self, mid: usize) -> (Self, Self) {
        match self {
            Offsets::Slice(s) => {
                if mid < s.len() {
                    debug!("split_at: {:?} {:?}", s.len(), mid);
                    let (head, tail) = s.split_at(mid);
                    (Offsets::Slice(head), Offsets::Slice(tail))
                } else {
                    // `mid` at the end: everything on the left, empty right.
                    debug_assert_eq!(mid, s.len());
                    (Offsets::Slice(s), Offsets::Slice(&[][..]))
                }
            }
            Offsets::Range(r) => {
                let cut = r.start + mid;
                (Offsets::Range(r.start..cut), Offsets::Range(cut..r.end))
            }
        }
    }
    /// `(right child, byte offset)` of the first entry. For the
    /// `Range` case, positions are converted to byte offsets using the
    /// fixed entry size and the aligned header size.
    pub fn first<T, K: Representable<T>, V: Representable<T>>(&self) -> (u64, u16) {
        match self {
            Offsets::Slice(s) => s[0].into(),
            Offsets::Range(r) => {
                let entry = fixed_size::<T, K, V>().unwrap();
                let align = K::ALIGN.max(V::ALIGN);
                let aligned_hdr = (HDR + align - 1) & !(align - 1);
                (0, (aligned_hdr + r.start * entry) as u16)
            }
        }
    }
}
/// Marker type: the [`Alloc`] implementation for leaf pages (entries
/// carry no child pointers).
pub struct Leaf {}
/// Marker type: the [`Alloc`] implementation for internal pages (each
/// entry's offset word also holds a right-child page pointer).
pub struct Internal {}
impl Alloc for Leaf {
    /// Fixed-size entries are addressed by position and need no
    /// bookkeeping; variable-size entries need a 2-byte offset slot.
    fn extra_size<T, K: Representable<T>, V: Representable<T>>() -> usize {
        if fixed_size::<T, K, V>().is_some() {
            0
        } else {
            2
        }
    }
    /// Can `size` more bytes be allocated without compacting the page?
    fn can_alloc<T, K: Representable<T>, V: Representable<T>>(hdr: &Header, size: usize) -> bool {
        if let Some(f) = fixed_size::<T, K, V>() {
            // Fixed-size entries are stored contiguously after the
            // (alignment-padded) header.
            let al = K::ALIGN.max(V::ALIGN);
            let header_size = (HDR + al - 1) & !(al - 1);
            header_size + (hdr.n() as usize) * f + size < hdr.data() as usize
        } else {
            // 2 extra bytes for the new entry's offset slot.
            HDR + (hdr.n() as usize) * 2 + 2 + size < hdr.data() as usize
        }
    }
    /// Would `size` more bytes fit after compaction (counting only the
    /// occupied bytes, `left_page & 0xfff`)?
    fn can_compact<T, K: Representable<T>, V: Representable<T>>(hdr: &Header, size: usize) -> bool {
        if fixed_size::<T, K, V>().is_some() {
            let al = K::ALIGN.max(V::ALIGN);
            let header_size = (HDR + al - 1) & !(al - 1);
            header_size + ((hdr.left_page() & 0xfff) as usize) + size
                < hdr.data() as usize
        } else {
            HDR + ((hdr.left_page() & 0xfff) as usize) + 2 + size < 4096
        }
    }
    /// Remove the first `n` entries. For fixed-size entries, the last
    /// removed entry (the future split key) is parked in the scratch
    /// slot just past the remaining entries, and pointers to it are
    /// returned.
    fn truncate_left<T, K: Representable<T>, V: Representable<T>>(
        page: &mut MutPage,
        n: usize,
    ) -> Option<(*const K, *const V)> {
        debug!("truncate_left {:?} {:?}", page, n);
        if let Some(f) = fixed_size::<T, K, V>() {
            let al = K::ALIGN.max(V::ALIGN);
            let hdr_size = (HDR + al - 1) & !(al - 1);
            let hdr_n = header_mut(page).n();
            unsafe {
                // Save entry `n - 1`, shift the kept entries to the
                // front, then park the saved entry right after them so
                // the returned pointers stay valid within this page.
                let mut swap: core::mem::MaybeUninit<Tuple<K, V>> =
                    core::mem::MaybeUninit::uninit();
                core::ptr::copy(
                    page.0.data.add(hdr_size + (n - 1) * f),
                    swap.as_mut_ptr() as *mut u8,
                    f,
                );
                core::ptr::copy(
                    page.0.data.add(hdr_size + n * f),
                    page.0.data.add(hdr_size),
                    (hdr_n as usize - n) * f,
                );
                core::ptr::copy(
                    swap.as_ptr() as *mut u8,
                    page.0.data.add(hdr_size + (hdr_n as usize - n) * f),
                    f,
                );
            }
            debug!("{:?} - {:?}", hdr_n, n);
            let hdr = header_mut(page);
            hdr.set_n(hdr_n - n as u16);
            hdr.decr(n * f);
            unsafe {
                Some(read::<T, K, V>(
                    page.0.data.add(hdr_size + (hdr_n as usize - n) * f),
                ))
            }
        } else {
            let hdr_n = header_mut(page).n();
            // BUG FIX: the sizes of the deleted entries must be summed
            // *before* shifting the offset array; the previous code
            // read the first `n` slots *after* the shift, i.e. the
            // offsets of kept entries, so the occupied-bytes counter
            // was decremented by the wrong amount. Also decode the
            // stored offsets as little-endian (they are written with
            // `to_le` in `set_offset`).
            let deleted_size: u64 = unsafe {
                core::slice::from_raw_parts(page.0.data.add(HDR) as *const u16, n)
            }
            .iter()
            .map(|&off| {
                let off = u16::from_le(off) as usize;
                2 + unsafe { entry_size::<T, K, V>(page.0.data.add(off)) } as u64
            })
            .sum();
            unsafe {
                core::ptr::copy(
                    page.0.data.add(HDR + n * 2),
                    page.0.data.add(HDR),
                    (hdr_n as usize - n) * 2,
                );
            }
            let hdr = header_mut(page);
            hdr.set_n(hdr_n - n as u16);
            hdr.decr(deleted_size as usize);
            None
        }
    }
    /// Allocate `size` bytes at the bottom of the data area, rounding
    /// the offset down to `align` (assumed to be a power of two).
    fn alloc(hdr: &mut Header, size: usize, align: usize) -> u16 {
        let mut data = hdr.data() - size as u16;
        data &= !((align - 1) as u16);
        hdr.set_data(data);
        hdr.set_n(hdr.n() + 1);
        hdr.incr(size);
        data
    }
    /// Open a hole at position `*n` and return the byte offset where
    /// the new entry must be written.
    fn alloc_insert<T, K: Representable<T>, V: Representable<T>>(
        new: &mut MutPage,
        n: &mut isize,
        size: usize,
        _: u64,
    ) -> usize {
        if let Some(f) = fixed_size::<T, K, V>() {
            let al = K::ALIGN.max(V::ALIGN);
            let hdr_size = (HDR + al - 1) & !(al - 1);
            // Shift entries `*n..` up by one slot; the hole at `*n` is
            // the returned allocation.
            let hdr_n = header_mut(new).n();
            unsafe {
                core::ptr::copy(
                    new.0.data.add(hdr_size + (*n as usize) * f),
                    new.0.data.add(hdr_size + (*n as usize) * f + f),
                    (hdr_n as usize - (*n as usize)) * f,
                );
            }
            let hdr = header_mut(new);
            hdr.set_n(hdr.n() + 1);
            hdr.incr(f);
            hdr_size + (*n as usize) * f
        } else {
            // Allocate `size` bytes (+2 accounted for the offset slot)
            // from the data area, then shift the offset array to make
            // room for the new slot at `*n`.
            let (hdr_n, off_new) = {
                let hdr = header_mut(new);
                (
                    hdr.n(),
                    Self::alloc(&mut *hdr, 2 + size, K::ALIGN.max(V::ALIGN)),
                )
            };
            unsafe {
                core::ptr::copy(
                    new.0.data.add(HDR + (*n as usize) * 2),
                    new.0.data.add(HDR + (*n as usize) * 2 + 2),
                    (hdr_n as usize - (*n as usize)) * 2,
                );
            }
            Self::set_offset(new, *n, 0, off_new);
            off_new as usize
        }
    }
    /// Leaf offset slots are little-endian `u16`s; the child pointer
    /// argument is ignored.
    fn set_offset(new: &mut MutPage, n: isize, _: u64, off: u16) {
        unsafe {
            let ptr = new.0.data.offset(HDR as isize + n * 2) as *mut u16;
            *ptr = off.to_le();
        }
    }
    /// Leaves have no children.
    fn set_right_child(_: &mut MutPage, _: isize, _: u64) {}
    type Offset = LeafOffset;
    fn offset_slice<'a, T, K: Representable<T>, V: Representable<T>>(
        page: crate::Page<'a>,
    ) -> Offsets<'a, Self::Offset> {
        let hdr = header(page);
        if fixed_size::<T, K, V>().is_some() {
            // Fixed-size entries are addressed by position only.
            Offsets::Range(0..(hdr.n() as usize))
        } else {
            unsafe {
                Offsets::Slice(core::slice::from_raw_parts(
                    page.data.as_ptr().add(HDR) as *const LeafOffset,
                    hdr.n() as usize,
                ))
            }
        }
    }
    /// Decode the entry at `off`; leaves have no right child (0).
    fn kv<'a, T, K: Representable<T>, V: Representable<T>>(
        page: crate::Page,
        (_, off): (u64, u16),
    ) -> (&'a K, &'a V, u64) {
        unsafe {
            let (k, v) = read::<T, K, V>(page.data.as_ptr().add(off as usize));
            (&*k, &*v, 0)
        }
    }
}
impl Alloc for Internal {
    /// Each entry costs an extra 8-byte word holding its right-child
    /// pointer and its offset within the page.
    fn extra_size<T, K: Representable<T>, V: Representable<T>>() -> usize {
        8
    }
    /// Can `size` bytes (plus one 8-byte offset word) be allocated
    /// without compaction?
    fn can_alloc<T, K: Representable<T>, V: Representable<T>>(hdr: &Header, size: usize) -> bool {
        (HDR as usize) + (hdr.n() as usize) * 8 + 8 + size < hdr.data() as usize
    }
    /// Would the allocation fit after compaction (counting only the
    /// occupied bytes, `left_page & 0xfff`)?
    fn can_compact<T, K: Representable<T>, V: Representable<T>>(hdr: &Header, size: usize) -> bool {
        (HDR as usize) + ((hdr.left_page() & 0xfff) as usize) + 8 + size < 4096
    }
    /// Remove the first `n` entries; the right child of the last
    /// removed entry becomes the new leftmost child of the page.
    fn truncate_left<T, K: Representable<T>, V: Representable<T>>(
        page: &mut MutPage,
        n: usize,
    ) -> Option<(*const K, *const V)> {
        // The following copy starts at entry `n - 1` and lands on the
        // leftmost-child slot at `HDR - 8` (hence the `- 8`, `- 1` and
        // `+ 1`): the last removed entry's child word becomes the new
        // leftmost child, and the kept entries move to the front.
        let hdr_n = header_mut(page).n();
        unsafe {
            core::ptr::copy(
                page.0.data.add(HDR + (n - 1) * 8),
                page.0.data.add(HDR - 8),
                (hdr_n as usize - n + 1) * 8,
            );
        }
        // Recompute the number of bytes occupied by the remaining
        // entries (stored in the low 12 bits of `left_page`).
        let size = if let Some(f) = fixed_size::<T, K, V>() {
            ((8 + f) * (hdr_n as usize - n)) as u64
        } else {
            // BUG FIX: internal offset slots are 8-byte little-endian
            // words (child | offset), and after the copy above the
            // kept entries start at `HDR`. The previous code read
            // 2-byte slots at the pre-shift position `HDR + n * 8`,
            // summing sizes of garbage offsets.
            let offsets = unsafe {
                core::slice::from_raw_parts(
                    page.0.data.add(HDR) as *const u64,
                    hdr_n as usize - n,
                )
            };
            offsets
                .iter()
                .map(|&e| {
                    let off = (u64::from_le(e) & 0xfff) as usize;
                    8 + unsafe { entry_size::<T, K, V>(page.0.data.add(off)) } as u64
                })
                .sum()
        };
        let hdr = header_mut(page);
        hdr.set_n(hdr_n - n as u16);
        debug!("truncate_left {:?} {:?}", hdr.left_page(), size);
        hdr.set_occupied(size);
        None
    }
    /// Allocate `size` bytes at the bottom of the data area, aligned
    /// down to a multiple of `align`.
    fn alloc(hdr: &mut Header, size: usize, align: usize) -> u16 {
        let mut data = hdr.data() - size as u16;
        data -= data % (align as u16);
        hdr.set_data(data);
        hdr.set_n(hdr.n() + 1);
        hdr.incr(size);
        data
    }
    /// Open a slot at position `*n` (shifting the offset words) and
    /// return the byte offset where the new entry must be written.
    fn alloc_insert<T, K: Representable<T>, V: Representable<T>>(
        new: &mut MutPage,
        n: &mut isize,
        size: usize,
        r: u64,
    ) -> usize {
        let (hdr_n, off_new) = {
            let hdr = header_mut(new);
            (
                hdr.n(),
                Self::alloc(&mut *hdr, size, K::ALIGN.max(V::ALIGN)),
            )
        };
        unsafe {
            core::ptr::copy(
                new.0.data.add(HDR + (*n as usize) * 8),
                new.0.data.add(HDR + (*n as usize) * 8 + 8),
                (hdr_n as usize - (*n as usize)) * 8,
            );
        }
        Self::set_offset(new, *n, r, off_new);
        off_new as usize
    }
    /// Slot `n` holds the right-child page `r` ORed with the entry's
    /// byte offset (low 12 bits), little-endian.
    fn set_offset(new: &mut MutPage, n: isize, r: u64, off: u16) {
        unsafe {
            let ptr = new.0.data.offset(HDR as isize + n * 8) as *mut u64;
            *ptr = (r | off as u64).to_le();
        }
    }
    /// Replace the child pointer of slot `n`, preserving the offset
    /// bits; `n == -1` addresses `HDR - 8`, i.e. the header's
    /// `left_page` field (the leftmost child).
    fn set_right_child(page: &mut MutPage, n: isize, r: u64) {
        unsafe {
            let ptr = page.0.data.offset(HDR as isize + n * 8) as *mut u64;
            let off = u64::from_le(*ptr) & 0xfff;
            *ptr = (r | off as u64).to_le();
        }
    }
    type Offset = InternalOffset;
    fn offset_slice<'a, T, K: Representable<T>, V: Representable<T>>(
        page: crate::Page<'a>,
    ) -> Offsets<'a, Self::Offset> {
        let hdr = header(page);
        unsafe {
            Offsets::Slice(core::slice::from_raw_parts(
                page.data.as_ptr().add(HDR) as *const InternalOffset,
                hdr.n() as usize,
            ))
        }
    }
    /// Decode the entry at `off`, returning its right child `r` too.
    fn kv<'a, T, K: Representable<T>, V: Representable<T>>(
        page: crate::Page,
        (r, off): (u64, u16),
    ) -> (&'a K, &'a V, u64) {
        unsafe {
            let (k, v) = read::<T, K, V>(page.data.as_ptr().add(off as usize));
            (&*k, &*v, r)
        }
    }
}
/// On-page offset slot of a leaf entry: just a byte offset, no child.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct LeafOffset(u16);
// Implement `From` rather than `Into` (clippy `from_over_into`); the
// blanket impl still provides `Into<(u64, u16)>`, so the
// `Alloc::Offset` bound and all `.into()` call sites are unaffected.
impl From<LeafOffset> for (u64, u16) {
    fn from(o: LeafOffset) -> Self {
        // Leaves have no right child, hence 0.
        (0, o.0)
    }
}
impl From<LeafOffset> for usize {
    fn from(o: LeafOffset) -> Self {
        o.0 as usize
    }
}
/// On-page offset slot of an internal entry: the right-child page
/// offset in the upper bits, the entry's byte offset in the low 12.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct InternalOffset(u64);
// Implement `From` rather than `Into` (clippy `from_over_into`); the
// blanket impl still provides `Into<(u64, u16)>`, so the
// `Alloc::Offset` bound and all `.into()` call sites are unaffected.
impl From<InternalOffset> for (u64, u16) {
    fn from(o: InternalOffset) -> Self {
        (o.0 & !0xfff, (o.0 & 0xfff) as u16)
    }
}
impl From<InternalOffset> for usize {
    fn from(o: InternalOffset) -> Self {
        o.0 as usize
    }
}
// Mark the replacement for deletion in the leaf. This is all
// lazy, so we don't do anything for now, in order to avoid
// duplicate work in case the page can be merged or
// rebalanced.
let ref mut curs0 = unsafe { cursor.stack[cursor.pointer].assume_init() };
let (c0, c1) = P::split_at(curs0.page.as_page(), curs0.cursor.as_ref().unwrap());
let mut last_op = ModifiedPage {
page: curs0.page,
mutable: cursor.pointer < cursor.first_rc_level,
c0,
l: 0,
r: 0,
ins: None,
ins2: None,
c1,
skip_first: true,
};
if cursor.pointer == cursor.first_rc_level {
debug!("decr_rc");
txn.decr_rc(curs0.page.offset)?;
debug!("/decr_rc");
}
if cursor.pointer >= cursor.first_rc_level {
let mut c0 = c0.clone();
let mut c1 = c1.clone();
P::move_next(curs0.page.as_page(), &mut c1);
while let Some((k, v, _)) =
P::next(curs0.page.as_page(), &mut c0).or_else(|| P::next(curs0.page.as_page(), &mut c1))
{
for o in k.page_offsets().chain(v.page_offsets()) {
txn.incr_rc(o)?;
}
}
}
debug!("pointer = {:?}", cursor.pointer);
let mut k0 = MaybeUninit::uninit();
let mut v0 = MaybeUninit::uninit();
if p0 < cursor.pointer {
// increase the RC of the replacement.
unsafe {
let (k, v, _) = P::unchecked_current(curs0.page.as_page(), &c0);
if cursor.pointer >= cursor.first_rc_level {
for o in (&*k).page_offsets().chain((&*v).page_offsets()) {
txn.incr_rc(o)?;
}
}
core::ptr::copy_nonoverlapping(k, k0.as_mut_ptr(), 1);
core::ptr::copy_nonoverlapping(v, v0.as_mut_ptr(), 1);
}
}
// Mark the replacement for deletion in the leaf, and copy it into
// (k0, v0) if we're in an internal node.
let (mut last_op, k0, v0) = leaf_delete(txn, cursor, p0)?;
let mut concat = {
let p = cursor.pointer;
let curs = cursor.current_mut();
let kv = if p == p0 {
unsafe { Some((
&*k0.as_ptr(),
&*v0.as_ptr()
)) }
} else {
None
};
concat(txn, curs, kv, last_op)?
};
let curs = cursor.current();
let c = curs.cursor.as_ref().unwrap();
let (c0, c1) = P::split_at(curs.page.as_page(), c);
debug!("page = {:?}, c0 = {:?}, c1 = {:?}", curs.page.offset, c0, c1);
debug!("concat = {:?}" ,concat);
let mut concat = concat(txn, cursor, p0, &k0, &v0, last_op)?;
debug!("concat = {:?}", concat);
match merge {
Op::Merged {
page,
freed,
marker: _,
} => {
// If we're deleting at an internal node, the
// replacement has already been included in the merged
// page.
last_op = ModifiedPage {
page: curs.page,
mutable: cursor.pointer < cursor.first_rc_level,
c0,
l: page.0.offset,
r: 0,
ins: None,
ins2: None,
c1,
skip_first: true,
};
if cursor.pointer < cursor.first_rc_level {
free[cursor.pointer] = freed;
} else {
if cursor.pointer == cursor.first_rc_level {
txn.decr_rc(curs.page.offset)?;
}
modify_rc(txn, &last_op)?;
}
}
Op::Rebalanced { k, v, l, r, freed } => {
// If we're deleting at an internal node, the
// replacement is already in pages `l` or `r`.
last_op = ModifiedPage {
page: curs.page,
mutable: cursor.pointer < cursor.first_rc_level,
c0,
l,
r,
ins: Some((k, v)),
ins2: None,
c1,
skip_first: true,
};
if cursor.pointer < cursor.first_rc_level {
free[cursor.pointer] = freed;
} else {
if cursor.pointer == cursor.first_rc_level {
txn.decr_rc(curs.page.offset)?;
}
modify_rc(txn, &last_op)?;
}
}
Op::Put(Put::Ok(Ok { page, freed })) => {
// Here, no merge, split or rebalance has happened. If
// we're deleting at an internal node (i.e. if
// cursor.pointer == p0), we need to mark the
// replacement here.
let (l, r, ins, skip_first) = if cursor.pointer == p0 {
let k = unsafe { &*k0.as_ptr() };
let v = unsafe { &*v0.as_ptr() };
(0, page.0.offset, Some((k, v)), true)
} else if concat.mod_is_left {
(page.0.offset, 0, None, false)
} else {
(0, page.0.offset, None, false)
};
last_op = ModifiedPage {
page: curs.page,
mutable: cursor.pointer < cursor.first_rc_level,
c0,
l,
r,
ins,
ins2: None,
c1,
skip_first,
};
if cursor.pointer < cursor.first_rc_level {
free[cursor.pointer][0] = freed;
} else {
if cursor.pointer == cursor.first_rc_level {
txn.decr_rc(curs.page.offset)?;
}
modify_rc(txn, &last_op)?;
}
}
Op::Put(Put::Split {
left,
right,
split_key,
split_value,
freed,
}) => {
let (ins, ins2, skip_first) = if cursor.pointer == p0 {
let k = unsafe { &*k0.as_ptr() };
let v = unsafe { &*v0.as_ptr() };
(Some((k, v)), Some((split_key, split_value)), true)
} else {
(Some((split_key, split_value)), None, false)
};
last_op = ModifiedPage {
page: curs.page,
mutable: cursor.pointer < cursor.first_rc_level,
c0,
l: left.0.offset,
r: right.0.offset,
ins,
ins2,
c1,
skip_first,
};
if cursor.pointer < cursor.first_rc_level {
free[cursor.pointer][0] = freed;
} else {
if cursor.pointer == cursor.first_rc_level {
txn.decr_rc(curs.page.offset)?;
}
modify_rc(txn, &last_op)?;
}
// If the split key is the replacement element, we
// have already increased its counter when we deleted
// it from its original position at the bottom of the
// tree.
if cursor.pointer + 1 >= cursor.first_rc_level && (split_key as *const K) != k0.as_ptr() {
for o in split_key.page_offsets().chain(split_value.page_offsets()) {
txn.incr_rc(o)?;
}
}
}
}
let mil = concat.mod_is_left;
last_op = handle_merge(txn, cursor, p0, &k0, &v0, mil, merge, &mut free)?;
debug!("modify {:?}", last_op);
match P::modify(txn, &mut last_op)? {
Put::Ok(Ok { page, freed }) => {
free[0][0] = freed;
db.db = page.0
}
Put::Split {
split_key,
split_value,
left,
right,
freed,
} => {
free[0][0] = freed;
let mut page = txn.alloc_page()?;
P::init(&mut page);
P::put(
txn,
page.0,
true,
&P::first_cursor(page.0.as_page()),
split_key,
split_value,
None,
left.0.offset,
right.0.offset,
)?;
if cursor.first_rc_level <= 1 {
for o in split_key.page_offsets().chain(split_value.page_offsets()) {
txn.incr_rc(o)?;
}
}
db.db = page.0
}
}
update_root(txn, db, last_op, &k0, cursor.first_rc_level <= 1, &mut free)?
}
/// Prepare the deletion of the current entry of the page at the top of
/// the cursor stack, without modifying the page yet (the work stays
/// lazy so it can be folded into a later merge/rebalance).
///
/// Returns the [`ModifiedPage`] describing the deletion (`c0` before
/// the entry, `c1` from the entry on, `skip_first` set), plus copies
/// `(k0, v0)` of the current entry when the element actually deleted
/// lives on a node above this one (`p0 < cursor.pointer`) — the
/// current entry then serves as its replacement. `k0`/`v0` stay
/// uninitialised otherwise.
fn leaf_delete<
    'a,
    T: AllocPage + LoadPage + core::fmt::Debug,
    K: Representable<T> + core::fmt::Debug,
    V: Representable<T> + core::fmt::Debug,
    P: BTreeMutPage<T, K, V> + core::fmt::Debug,
>(
    txn: &mut T,
    cursor: &mut Cursor<T, K, V, P>,
    p0: usize,
) -> Result<(ModifiedPage<'a, T, K, V, P>, MaybeUninit<K>, MaybeUninit<V>), T::Error> {
    let ref mut curs0 = unsafe { cursor.stack[cursor.pointer].assume_init() };
    // Split the page's entries around the one to delete.
    let (c0, c1) = P::split_at(curs0.page.as_page(), curs0.cursor.as_ref().unwrap());
    // This tree stops referencing the shared page: drop one reference.
    if cursor.pointer == cursor.first_rc_level {
        debug!("decr_rc");
        txn.decr_rc(curs0.page.offset)?;
        debug!("/decr_rc");
    }
    // If the page is shared, the modified copy will duplicate every
    // entry except the deleted one, so take an extra reference on the
    // pages referenced by each surviving entry.
    if cursor.pointer >= cursor.first_rc_level {
        let mut c0 = c0.clone();
        let mut c1 = c1.clone();
        // Skip the deleted entry itself.
        P::move_next(curs0.page.as_page(), &mut c1);
        while let Some((k, v, _)) = P::next(curs0.page.as_page(), &mut c0)
            .or_else(|| P::next(curs0.page.as_page(), &mut c1))
        {
            for o in k.page_offsets().chain(v.page_offsets()) {
                txn.incr_rc(o)?;
            }
        }
    }
    debug!("pointer = {:?}", cursor.pointer);
    let mut k0 = MaybeUninit::uninit();
    let mut v0 = MaybeUninit::uninit();
    // Deletion happens on an internal node above (`p0`): copy the
    // replacement entry out of this page.
    if p0 < cursor.pointer {
        // increase the RC of the replacement.
        unsafe {
            let (k, v, _) = P::unchecked_current(curs0.page.as_page(), &c0);
            if cursor.pointer >= cursor.first_rc_level {
                for o in (&*k).page_offsets().chain((&*v).page_offsets()) {
                    txn.incr_rc(o)?;
                }
            }
            // Bitwise copies; ownership semantics are handled by the
            // RC adjustments above.
            core::ptr::copy_nonoverlapping(k, k0.as_mut_ptr(), 1);
            core::ptr::copy_nonoverlapping(v, v0.as_mut_ptr(), 1);
        }
    }
    Ok((
        ModifiedPage {
            page: curs0.page,
            mutable: cursor.pointer < cursor.first_rc_level,
            c0,
            l: 0,
            r: 0,
            ins: None,
            ins2: None,
            c1,
            skip_first: true,
        },
        k0,
        v0,
    ))
}
}
/// Fold the result of an operation on a child page (`merge`: a merge,
/// rebalance or plain put) into the page at `cursor.pointer`, and
/// return the [`ModifiedPage`] describing the change to apply at this
/// level.
///
/// `p0` is the stack level at which the element is actually deleted;
/// when `cursor.pointer == p0`, the replacement `(k0, v0)` (filled in
/// by `leaf_delete`) is inserted here. Freed pages are recorded in
/// `free` when this page is exclusively owned; otherwise reference
/// counts are adjusted instead.
fn handle_merge<
    'a,
    T: AllocPage + LoadPage,
    K: Representable<T>,
    V: Representable<T>,
    P: BTreeMutPage<T, K, V> + core::fmt::Debug,
>(
    txn: &mut T,
    cursor: &mut Cursor<T, K, V, P>,
    p0: usize,
    k0: &MaybeUninit<K>,
    v0: &MaybeUninit<V>,
    mod_is_left: bool,
    merge: Op<'a, T, K, V>,
    free: &mut [[u64; 2]; N_CURSORS],
) -> Result<ModifiedPage<'a, T, K, V, P>, T::Error> {
    let mutable = cursor.pointer < cursor.first_rc_level;
    // Start from "delete the current entry of this page" and refine
    // below according to what happened in the child.
    let mut last_op = {
        let curs = cursor.current_mut();
        let c = curs.cursor.as_ref().unwrap();
        let (c0, c1) = P::split_at(curs.page.as_page(), c);
        ModifiedPage {
            page: curs.page,
            mutable,
            c0,
            l: 0,
            r: 0,
            ins: None,
            ins2: None,
            c1,
            skip_first: true,
        }
    };
    let freed = match merge {
        Op::Merged {
            page,
            freed,
            marker: _,
        } => {
            // If we're deleting at an internal node, the
            // replacement has already been included in the merged
            // page.
            last_op.l = page.0.offset;
            freed
        }
        Op::Rebalanced { k, v, l, r, freed } => {
            // If we're deleting at an internal node, the
            // replacement is already in pages `l` or `r`.
            last_op.l = l;
            last_op.r = r;
            last_op.ins = Some((k, v));
            freed
        }
        Op::Put(Put::Ok(Ok { page, freed })) => {
            // No merge, split or rebalance has happened. If we're
            // deleting at an internal node (i.e. if cursor.pointer ==
            // p0), we need to mark the replacement here.
            if cursor.pointer == p0 {
                last_op.ins = unsafe { Some((&*k0.as_ptr(), &*v0.as_ptr())) };
                last_op.r = page.0.offset;
            } else {
                last_op.skip_first = false;
                if mod_is_left {
                    last_op.l = page.0.offset;
                } else {
                    last_op.r = page.0.offset;
                }
            }
            [freed, 0]
        }
        Op::Put(Put::Split {
            left,
            right,
            split_key,
            split_value,
            freed,
        }) => {
            // This case only happens if `(K, V)` is not `Sized`, when
            // either (1) a rebalance replaces a key/value pair with a
            // larger one or (2) another split has happened in a page
            // below.
            if cursor.pointer == p0 {
                // In this case, ins2 comes after ins, since the
                // replacement is in the right child of the deleted
                // key.
                last_op.ins = unsafe { Some((&*k0.as_ptr(), &*v0.as_ptr())) };
                last_op.ins2 = Some((split_key, split_value))
            } else {
                last_op.ins = Some((split_key, split_value));
                last_op.skip_first = false
            }
            // The `l` and `r` fields are relative to `ins2` if
            // `ins2.is_some()` or to `ins` else.
            last_op.l = left.0.offset;
            last_op.r = right.0.offset;
            // If the split key is the replacement element, we have
            // already increased its counter when we deleted it from
            // its original position at the bottom of the tree.
            //
            // This can happen if we replaced an element and that
            // replacement caused a split with the replacement as the
            // middle element.
            if cursor.pointer + 1 >= cursor.first_rc_level && (split_key as *const K) != k0.as_ptr()
            {
                for o in split_key.page_offsets().chain(split_value.page_offsets()) {
                    txn.incr_rc(o)?;
                }
            }
            [freed, 0]
        }
    };
    if cursor.pointer < cursor.first_rc_level {
        free[cursor.pointer] = freed;
    } else {
        if cursor.pointer == cursor.first_rc_level {
            txn.decr_rc(last_op.page.offset)?;
        }
        modify_rc(txn, &last_op)?;
    }
    // BUG FIX: return the modified page; the function used to end with
    // `Ok(())`, which does not type-check against the declared return
    // type (and the caller does `last_op = handle_merge(...)?`).
    Ok(last_op)
}
fn update_root<
'a,
T: AllocPage + LoadPage + core::fmt::Debug,
K: Representable<T> + core::fmt::Debug,
V: Representable<T> + core::fmt::Debug,
P: BTreeMutPage<T, K, V> + core::fmt::Debug,
>(
txn: &mut T,
db: &mut Db_<T, K, V, P>,
mut last_op: ModifiedPage<T, K, V, P>,
k0: &MaybeUninit<K>,
is_rc: bool,
free: &mut [[u64; 2]; N_CURSORS],
) -> Result<(), T::Error> {
debug!("modify {:?}", last_op);
match P::modify(txn, &mut last_op)? {
Put::Ok(Ok { page, freed }) => {
free[0][0] = freed;
db.db = page.0
}
Put::Split {
split_key,
split_value,
left,
right,
freed,
} => {
free[0][0] = freed;
let mut page = txn.alloc_page()?;
P::init(&mut page);
P::put(
txn,
page.0,
true,
&P::first_cursor(page.0.as_page()),
split_key,
split_value,
None,
left.0.offset,
right.0.offset,
)?;
if is_rc && (split_key as *const K) != k0.as_ptr() {
for o in split_key.page_offsets().chain(split_value.page_offsets()) {
txn.incr_rc(o)?;
}
}
db.db = page.0
}