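// The fragments below contrast two generations of the same B-tree
// code: an older API built around the `Representable` trait, where
// each type describes its own alignment and on-page size, and a newer
// one built around `Storable`, where sized entries are read and
// written through a plain `Tuple<K, V>` struct. Pairs of old/new
// excerpts are labelled "// Old:" and "// New:" below.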
// Old API (Representable-based):
pub trait Representable: core::fmt::Debug {
    /// An iterator over the offsets of the pages referenced by this value.
    type PageOffsets: Iterator<Item = u64>;
    /// If this value is an offset to another page at offset `offset`,
    /// return `Some(offset)`; otherwise return `None`.
    fn page_offsets(&self) -> Self::PageOffsets;
    /// Alignment of this type on the page.
    const ALIGN: usize;
    /// Some data structures expect this to be at least the memory size
    /// of `Self` (as returned by `core::mem::size_of::<Self>()`). For
    /// example, the sized implementation of B trees sometimes
    /// allocates an instance of `Self` on the stack, and copies it
    /// from the page.
    const SIZE: Option<usize>;
    /// If `Self::SIZE.is_some()`, this must return the same
    /// value. The default implementation is `Self::SIZE.unwrap()`.
    fn size(&self) -> usize {
        Self::SIZE.unwrap()
    }
    /// Form a pointer to `Self` from a raw pointer. The purpose of
    /// this function is that the output can be a thin or a fat
    /// pointer (as for `&[u8]`).
    unsafe fn from_raw_ptr<'a, T: LoadPage>(txn: &T, p: *const u8) -> &'a Self;
    /// Read the size from an on-page entry. If `Self::SIZE.is_some()`,
    /// this must be the same value.
    unsafe fn onpage_size(_: *const u8) -> usize {
        Self::SIZE.unwrap()
    }
    /// Write to a page. Must not overwrite the allocated size, but
    /// this isn't checked (which is why it's unsafe).
    unsafe fn write_to_page(&self, p: *mut u8);
}
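// A minimal sketch (not from the original source, assuming only the
// trait as shown above) of what an implementation of the old trait
// looks like for a fixed-size type such as `u64`. `PageOffsets` is
// empty because a `u64` value holds no references to other pages.
impl Representable for u64 {
    type PageOffsets = core::iter::Empty<u64>;
    fn page_offsets(&self) -> Self::PageOffsets {
        core::iter::empty()
    }
    const ALIGN: usize = core::mem::align_of::<u64>();
    const SIZE: Option<usize> = Some(core::mem::size_of::<u64>());
    unsafe fn from_raw_ptr<'a, T: LoadPage>(_txn: &T, p: *const u8) -> &'a Self {
        &*(p as *const u64)
    }
    unsafe fn write_to_page(&self, p: *mut u8) {
        // ALIGN guarantees `p` is 8-byte aligned, so a plain store is fine.
        *(p as *mut u64) = *self;
    }
}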
// New API (Storable-based):
pub trait Storable: core::fmt::Debug {
    unsafe fn from_raw_ptr<'a, T>(_: &T, p: *const u8) -> &'a Self {
        &*(p as *const Self)
    }
    // … (remaining items elided in this excerpt)
}
// New, from a macro implementing the trait for primitive types
// ($t is the macro's type parameter):
impl UnsizedStorable for $t {
    const ALIGN: usize = core::mem::align_of::<$t>();
    /// If `Self::SIZE.is_some()`, this must return the same
    /// value. The default implementation is `Self::SIZE.unwrap()`.
    fn size(&self) -> usize {
        core::mem::size_of::<Self>()
    }
    // … (remaining items elided in this excerpt)
}
// Old:
impl Representable for [u8] {
    // … (body elided in this excerpt)
}
// New:
/// Types that can be stored on disk.
pub trait UnsizedStorable: Storable {
    const ALIGN: usize;
    const SIZE: Option<usize>;
    /// If `Self::SIZE.is_some()`, this must return the same
    /// value. The default implementation is `Self::SIZE.unwrap()`.
    fn size(&self) -> usize;
    /// Read the size from an on-page entry. If `Self::SIZE.is_some()`,
    /// this must be the same value.
    unsafe fn onpage_size(_: *const u8) -> usize;
    /// Write to a page. Must not overwrite the allocated size, but
    /// this isn't checked (which is why it's unsafe).
    unsafe fn write_to_page(&self, p: *mut u8);
    unsafe fn from_raw_ptr<'a, T>(_: &T, p: *const u8) -> &'a Self;
}
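// A sketch (the 2-byte header layout is an assumption, not the crate's
// actual code) of how a variable-sized type can satisfy
// `UnsizedStorable`: store a little-endian length header, followed by
// the bytes themselves.
impl UnsizedStorable for [u8] {
    const ALIGN: usize = 2;
    const SIZE: Option<usize> = None; // variable-sized
    fn size(&self) -> usize {
        2 + self.len()
    }
    unsafe fn onpage_size(p: *const u8) -> usize {
        // ALIGN = 2 makes the u16 read below well-aligned.
        let len = u16::from_le(*(p as *const u16));
        2 + len as usize
    }
    unsafe fn write_to_page(&self, p: *mut u8) {
        *(p as *mut u16) = (self.len() as u16).to_le();
        core::ptr::copy_nonoverlapping(self.as_ptr(), p.add(2), self.len());
    }
    unsafe fn from_raw_ptr<'a, T>(_: &T, p: *const u8) -> &'a Self {
        let len = u16::from_le(*(p as *const u16)) as usize;
        core::slice::from_raw_parts(p.add(2), len)
    }
}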
// New:
impl Storable for [u8] {
    // … (body elided in this excerpt)
}
// Old: locate the key and value with `read`, then rebuild (possibly
// fat) references with `from_raw_ptr`.
let (k, v) = read::<T, K, V>(txn, page.data.as_ptr().add(off as usize));
Some((
    K::from_raw_ptr(txn, k as *const u8),
    V::from_raw_ptr(txn, v as *const u8),
    0,
))

// New: sized entries are laid out as a `Tuple<K, V>`, so a single
// pointer cast yields both fields.
let kv = &*(page.data.as_ptr().add(off as usize) as *const Tuple<K, V>);
Some((&kv.k, &kv.v, 0))
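// The `Tuple` struct assumed by these casts (a sketch of the
// definition, with field names taken from the accesses above):
// #[repr(C)] keeps the field order and offsets predictable, so the
// on-page bytes can be reinterpreted directly.
#[repr(C)]
struct Tuple<K, V> {
    k: K,
    v: V,
}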
// Old:
let (k, v) = read::<T, K, V>(txn, page.data.as_ptr().add((off & 0xfff) as usize));
Some((
    K::from_raw_ptr(txn, k as *const u8),
    V::from_raw_ptr(txn, v as *const u8),
    off & !0xfff,
))

// New:
let kv = &*(page.data.as_ptr().add((off as usize) & 0xfff) as *const Tuple<K, V>);
Some((&kv.k, &kv.v, off & !0xfff))
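// The masks above rely on 4096-byte pages: the low 12 bits of `off`
// give the entry's position within the page, and the high bits carry
// the child page pointer. As a standalone helper (hypothetical, for
// illustration only):
fn unpack_offset(off: u64) -> (u64, usize) {
    let child = off & !0xfff; // high bits: child page pointer
    let within_page = (off & 0xfff) as usize; // low 12 bits: on-page offset
    (child, within_page)
}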
// Old: internal entries can be variable-sized, so the entry itself
// must be located (via the offset array after the header) and
// measured with `entry_size`.
unsafe {
    if c.is_leaf {
        core::mem::size_of::<Tuple<K, V>>()
    } else {
        8 + entry_size::<K, V>(page.data.as_ptr().add(
            (u64::from_le(*(page.data.as_ptr().add(HDR) as *const u64).add(c.cur as usize))
                & 0xfff) as usize,
        ))
    }
}

// New: sized entries make this a constant, plus 8 extra bytes for an
// internal entry's child pointer.
if c.is_leaf {
    core::mem::size_of::<Tuple<K, V>>()
} else {
    8 + core::mem::size_of::<Tuple<K, V>>()
}
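// Worked example (assuming K = u64 and V = u64, so no padding in the
// tuple): a leaf entry occupies size_of::<Tuple<u64, u64>>() = 16
// bytes, and an internal entry 8 + 16 = 24 bytes.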
// Old:
impl<K: Representable + core::fmt::Debug, V: Representable + core::fmt::Debug>
    super::BTreeMutPage<K, V> for Page<K, V>

// New:
impl<K: Storable + core::fmt::Debug, V: Storable + core::fmt::Debug> super::BTreeMutPage<K, V>
    for Page<K, V>
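// Throughout the signatures that follow, the substantive change is the
// trait bound: layout concerns (alignment, on-page size) move out of K
// and V and into `Tuple<K, V>`, so `Storable` is all the B-tree code
// needs to require.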
// Old: each insertion's size depends on the values being inserted.
total += extra + crate::alloc_size(k, v) as usize;
if let Some((k, v)) = m.ins2 {
    total += extra + crate::alloc_size(k, v) as usize;
}

// New: every entry has the same, statically known size.
total += extra + core::mem::size_of::<Tuple<K, V>>();
if m.ins2.is_some() {
    total += extra + core::mem::size_of::<Tuple<K, V>>()
}
// Old:
let (k, v) = read::<T, K, V>(txn, p.as_ptr().offset(off as isize & 0xfff));
let k = K::from_raw_ptr(txn, k);
match k.compare(txn, k0) {

// New:
let tup = &*(p.as_ptr().offset(off as isize & 0xfff) as *const Tuple<K, V>);
match tup.k.compare(txn, k0) {
// Old:
fn modify<
    T: LoadPage,
    K: Representable + core::fmt::Debug,
    V: Representable + core::fmt::Debug,
    L: Alloc,
>(

// New:
fn modify<T: LoadPage, K: Storable + core::fmt::Debug, V: Storable + core::fmt::Debug, L: Alloc>(

// Old:
fn merge<
    T: LoadPage,
    K: Representable + core::fmt::Debug,
    V: Representable + core::fmt::Debug,
    L: Alloc,
>(

// New:
fn merge<T: LoadPage, K: Storable + core::fmt::Debug, V: Storable + core::fmt::Debug, L: Alloc>(
// Old: write the key, then round the offset up to V's alignment by
// hand before writing the value.
let new_ptr = new.0.data.add(off_new as usize);
core::ptr::copy_nonoverlapping(k0, new_ptr as *mut K, 1);
let ks = k0.size();
let v_ptr = new_ptr.add((ks + V::ALIGN - 1) & !(V::ALIGN - 1));
core::ptr::copy_nonoverlapping(v0, v_ptr as *mut V, 1);

// New: the compiler computes the field offsets of `Tuple<K, V>`,
// padding included.
let new_ptr = &mut *(new.0.data.add(off_new as usize) as *mut Tuple<K, V>);
core::ptr::copy_nonoverlapping(k0, &mut new_ptr.k, 1);
core::ptr::copy_nonoverlapping(v0, &mut new_ptr.v, 1);
debug!("allocated {:?} {:?} {:?}", new_ptr, l, r);
// Old:
fn can_alloc<K: Representable, V: Representable>(hdr: &Header, size: usize) -> bool;
fn can_compact<K: Representable, V: Representable>(hdr: &Header, size: usize) -> bool;

// New:
fn can_alloc<K: Storable, V: Storable>(hdr: &Header, size: usize) -> bool;
fn can_compact<K: Storable, V: Storable>(hdr: &Header, size: usize) -> bool;

// Old: the entry size must be passed in.
fn alloc_insert<K: Representable, V: Representable>(
    new: &mut MutPage,
    n: &mut isize,
    size: usize,
    r: u64,
) -> usize;

// New: the `size` parameter disappears, since the entry size is a
// compile-time constant.
fn alloc_insert<K: Storable, V: Storable>(new: &mut MutPage, n: &mut isize, r: u64) -> usize;
// Old:
fn can_compact<K: Representable, V: Representable>(hdr: &Header, size: usize) -> bool {
    let al = K::ALIGN.max(V::ALIGN);
    // …

// New:
fn can_compact<K: Storable, V: Storable>(hdr: &Header, size: usize) -> bool {
    let al = core::mem::align_of::<Tuple<K, V>>();
    // …
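// These two computations agree: the alignment of `Tuple<K, V>` is at
// least the larger of its fields' alignments, and equal to it for a
// plain struct with no explicit repr(align).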
// Old:
let (k, v) = read::<T, K, V>(txn, page.0.data.add(hdr_size + (hdr_n as usize - n) * f));
Some((K::from_raw_ptr(txn, k), V::from_raw_ptr(txn, v)))

// New:
let tup =
    &*(page.0.data.add(hdr_size + (hdr_n as usize - n) * f) as *const Tuple<K, V>);
Some((&tup.k, &tup.v))
// Old:
fn alloc_insert<K: Representable, V: Representable>(
    new: &mut MutPage,
    n: &mut isize,
    size: usize,
    _: u64,
) -> usize {

// New:
fn alloc_insert<K: Storable, V: Storable>(new: &mut MutPage, n: &mut isize, _: u64) -> usize {
// Old:
let (k, v) = read::<T, K, V>(txn, page.data.as_ptr().add(off as usize));
(K::from_raw_ptr(txn, k), V::from_raw_ptr(txn, v), 0)

// New:
let tup = &*(page.data.as_ptr().add(off as usize) as *const Tuple<K, V>);
debug!(">>>>>>>> kv {:?} {:?}", off, tup);
(&tup.k, &tup.v, 0)
// Old:
fn alloc_insert<K: Representable, V: Representable>(
    new: &mut MutPage,
    n: &mut isize,
    size: usize,
    r: u64,
) -> usize {

// New: the size is recomputed locally instead of being threaded
// through as a parameter.
fn alloc_insert<K: Storable, V: Storable>(new: &mut MutPage, n: &mut isize, r: u64) -> usize {
    let size = core::mem::size_of::<Tuple<K, V>>();
    debug!("alloc internal {:?} {:?}", n, size);
// Old:
pub struct PageIterator<
    'a,
    T: LoadPage,
    K: Representable + ?Sized,
    V: Representable + ?Sized,
    P: BTreePage<K, V>,
> {

// New: the struct declaration needs no bound on K and V beyond ?Sized.
pub struct PageIterator<'a, T: LoadPage, K: ?Sized, V: ?Sized, P: BTreePage<K, V>> {

// Old:
impl<
    'a,
    T: LoadPage,
    K: Representable + ?Sized + 'a,
    V: Representable + ?Sized + 'a,
    P: BTreePage<K, V>,
> Iterator for PageIterator<'a, T, K, V, P>

// New:
impl<'a, T: LoadPage, K: ?Sized + 'a, V: ?Sized + 'a, P: BTreePage<K, V>> Iterator
    for PageIterator<'a, T, K, V, P>
// Old:
pub trait BTreeMutPage<K: Representable + ?Sized, V: Representable + ?Sized>:
    BTreePage<K, V> + core::fmt::Debug
{

// New:
pub trait BTreeMutPage<K: ?Sized, V: ?Sized>: BTreePage<K, V> + core::fmt::Debug {
// Old:
pub struct ModifiedPage<
    'a,
    K: Representable + ?Sized,
    V: Representable + ?Sized,
    P: BTreePage<K, V>,
> {

// New:
pub struct ModifiedPage<'a, K: ?Sized, V: ?Sized, P: BTreePage<K, V>> {
// Old:
pub fn create_db_<
    T: AllocPage,
    K: Representable + ?Sized,
    V: Representable + ?Sized,
    P: BTreeMutPage<K, V>,
>(

// New:
pub fn create_db_<T: AllocPage, K: ?Sized, V: ?Sized, P: BTreeMutPage<K, V>>(
// Old:
pub fn create_db<
    T: AllocPage + core::fmt::Debug,
    K: Representable + core::fmt::Debug,
    V: Representable + core::fmt::Debug,
>(

// New: the extra Debug bounds are dropped as well.
pub fn create_db<T: AllocPage, K: Storable, V: Storable>(
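// A minimal usage sketch (the transaction value and the exact return
// type are assumptions; this excerpt does not show them):
// let db: Db<u64, u64> = create_db(&mut txn)?;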
// Old:
pub fn fork_db<
    T: AllocPage,
    K: Representable + ?Sized,
    V: Representable + ?Sized,
    P: BTreeMutPage<K, V>,
>(

// New:
pub fn fork_db<T: AllocPage, K: Storable + ?Sized, V: Storable + ?Sized, P: BTreeMutPage<K, V>>(
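// fork_db clones a database root cheaply: with copy-on-write pages,
// the two handles share pages until one of them is modified. Usage
// sketch (argument and return types are assumptions):
// let db2 = fork_db(&mut txn, &db)?;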