use async_trait::async_trait;
pub mod btree;
mod slice;
pub use slice::*;
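/// The size of a page, in bytes.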
pub const PAGE_SIZE: usize = 4096;
/// Types that can be stored in a database, either directly on a page
/// or through references to other pages.
#[async_trait]
pub trait Storable: Send + Sync + core::fmt::Debug {
    /// Iterator over the offsets of the pages this value references.
    type PageReferences: Iterator<Item = u64> + Send;
    /// The pages this value references, used to maintain reference
    /// counts when trees are forked or dropped.
    fn page_references(&self) -> Self::PageReferences;
    /// Compare `self` with `b`, loading pages from `txn` if necessary.
    /// The default implementation panics and must be overridden.
    async fn compare<T: LoadPage>(&self, _txn: &T, _b: &Self) -> core::cmp::Ordering {
        unimplemented!()
    }
    /// Drop this value, decrementing the reference count of every page
    /// it references. The value must not be used after this call.
    async unsafe fn drop<T: AllocPage>(&self, txn: &mut T) -> Result<(), T::Error> {
        for p in self.page_references() {
            txn.decr_rc(p).await?;
        }
        Ok(())
    }
}
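/// Implement [`Storable`] and [`UnsizedStorable`] for a fixed-size,
/// `Ord` type whose in-memory representation can be copied to a page
/// verbatim.
///
/// A sketch of typical use on a caller-defined type (`Pair` is our
/// example, not part of this crate):
///
/// ```ignore
/// #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
/// #[repr(C)]
/// struct Pair {
///     a: u64,
///     b: u64,
/// }
/// direct_repr!(Pair);
/// ```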
#[macro_export]
macro_rules! direct_repr {
($t: ty) => {
#[async_trait::async_trait]
impl $crate::Storable for $t {
type PageReferences = core::iter::Empty<u64>;
fn page_references(&self) -> Self::PageReferences {
core::iter::empty()
}
        async fn compare<T: $crate::LoadPage>(&self, _txn: &T, b: &Self) -> core::cmp::Ordering {
self.cmp(b)
}
}
#[async_trait::async_trait]
impl $crate::UnsizedStorable for $t {
const ALIGN: usize = core::mem::align_of::<$t>();
fn size(&self) -> usize {
core::mem::size_of::<Self>()
}
unsafe fn onpage_size(_: *const u8) -> usize {
core::mem::size_of::<Self>()
}
            async unsafe fn write_to_page<T: $crate::AllocPage>(&self, _txn: &mut T, p: $crate::Mut<u8>) {
core::ptr::copy_nonoverlapping(self, p.0 as *mut Self, 1)
}
            async unsafe fn from_raw_ptr<'a, T>(_: &T, p: $crate::Const<u8>) -> &'a Self {
&*(p.0 as *const Self)
}
}
};
}
direct_repr!(());
direct_repr!(u8);
direct_repr!(i8);
direct_repr!(u16);
direct_repr!(i16);
direct_repr!(u32);
direct_repr!(i32);
direct_repr!(u64);
direct_repr!(i64);
direct_repr!([u8; 16]);
#[cfg(feature = "std")]
extern crate std;
#[cfg(feature = "std")]
direct_repr!(std::net::Ipv4Addr);
#[cfg(feature = "std")]
direct_repr!(std::net::Ipv6Addr);
#[cfg(feature = "std")]
direct_repr!(std::net::IpAddr);
#[cfg(feature = "std")]
direct_repr!(std::net::SocketAddr);
#[cfg(feature = "std")]
direct_repr!(std::time::SystemTime);
#[cfg(feature = "std")]
direct_repr!(std::time::Duration);
#[cfg(feature = "uuid")]
direct_repr!(uuid::Uuid);
#[cfg(feature = "ed25519")]
direct_repr!(ed25519_zebra::VerificationKeyBytes);
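/// A mutable raw pointer into a page, wrapped so it can be sent
/// between threads; the caller is responsible for synchronizing
/// access.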
#[derive(Clone, Copy)]
pub struct Mut<T>(pub *mut T);
unsafe impl<T> Send for Mut<T> {}
impl<T> Mut<T> {
pub unsafe fn new(p: *mut T) -> Self {
Mut(p)
}
pub fn into_inner(self) -> *mut T {
self.0
}
}
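/// A shared (read-only) raw pointer into a page, wrapped so it can be
/// sent between threads.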
#[derive(Clone, Copy)]
pub struct Const<T>(pub *const T);
unsafe impl<T> Send for Const<T> {}
impl<T> Const<T> {
pub unsafe fn new(p: *const T) -> Self {
Const(p)
}
pub fn into_inner(self) -> *const T {
self.0
}
}
/// Types whose on-page size is only known at runtime.
#[async_trait]
pub trait UnsizedStorable: Storable {
    /// Alignment of the on-page representation.
    const ALIGN: usize;
    /// On-page size of `self`.
    fn size(&self) -> usize;
    /// On-page size of the value stored at `p`.
    unsafe fn onpage_size(p: *const u8) -> usize;
    /// Write `self` to `p`, which must be aligned to `Self::ALIGN`
    /// and have at least `self.size()` bytes available.
    async unsafe fn write_to_page<T: AllocPage>(&self, txn: &mut T, p: Mut<u8>);
    /// Read a value from `p`, which must point to a valid on-page
    /// representation of `Self`.
    async unsafe fn from_raw_ptr<'a, T>(txn: &T, p: Const<u8>) -> &'a Self;
}
#[async_trait]
impl Storable for [u8] {
type PageReferences = core::iter::Empty<u64>;
fn page_references(&self) -> Self::PageReferences {
core::iter::empty()
}
async fn compare<T: LoadPage>(&self, _txn: &T, b: &Self) -> core::cmp::Ordering {
self.cmp(b)
}
}
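// On-page layout of a byte slice: a little-endian u16 length followed
// by the bytes themselves. Lengths are capped at 510 bytes, so an
// entry, length prefix included, never exceeds 512 bytes.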
#[async_trait]
impl UnsizedStorable for [u8] {
const ALIGN: usize = 2;
fn size(&self) -> usize {
2 + self.len()
}
    async unsafe fn from_raw_ptr<'a, T>(_: &T, p: Const<u8>) -> &'a Self {
        // The length is stored in front of the bytes, as a
        // little-endian u16.
        let len = u16::from_le(*(p.0 as *const u16));
        assert_ne!(len, 0);
        // Lengths are always strictly smaller than a page (4096 bytes).
        assert_eq!(len & 0xf000, 0);
        core::slice::from_raw_parts(p.0.add(2), len as usize)
    }
unsafe fn onpage_size(p: *const u8) -> usize {
let len = u16::from_le(*(p as *const u16));
2 + len as usize
}
    async unsafe fn write_to_page<T: AllocPage>(&self, _txn: &mut T, p: Mut<u8>) {
        assert!(self.len() <= 510);
        // Write the little-endian length prefix, then the bytes.
        *(p.0 as *mut u16) = (self.len() as u16).to_le();
        core::ptr::copy_nonoverlapping(self.as_ptr(), p.0.add(2), self.len())
    }
}
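/// Given a pointer `k` to the start of an entry, return a pair of
/// pointers to the key and to the value, the latter aligned to
/// `V::ALIGN`.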
unsafe fn read<K: UnsizedStorable + ?Sized, V: UnsizedStorable + ?Sized>(
k: *const u8,
) -> (*const u8, *const u8) {
let s = K::onpage_size(k);
let v = k.add(s);
let al = v.align_offset(V::ALIGN);
let v = v.add(al);
(k, v)
}
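/// Total on-page size of the entry starting at `k`: the key, the
/// padding needed to align the value, the value, and the padding
/// needed to align the next entry.
///
/// A worked example: with `K = [u8]` holding 5 bytes and `V = u64`,
/// the key occupies 2 + 5 = 7 bytes, the value is aligned up to
/// offset 8 and occupies 8 bytes, and 16 is already a multiple of
/// `max(K::ALIGN, V::ALIGN) = 8`, so the entry takes 16 bytes.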
unsafe fn entry_size<K: UnsizedStorable + ?Sized, V: UnsizedStorable + ?Sized>(
k: *const u8,
) -> usize {
assert_eq!(k.align_offset(K::ALIGN), 0);
let ks = K::onpage_size(k);
let v_off = (ks + V::ALIGN - 1) & !(V::ALIGN - 1);
let v_ptr = k.add(v_off);
let vs = V::onpage_size(v_ptr);
let ka = K::ALIGN.max(V::ALIGN);
let size = v_off + vs;
(size + ka - 1) & !(ka - 1)
}
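/// A page of a possibly-shared (copy-on-write) tree: `data` points to
/// the page contents in memory, and `offset` is the position of the
/// page in the underlying file.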
#[derive(Debug)]
#[repr(C)]
pub struct CowPage {
pub data: *mut u8,
pub offset: u64,
}
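/// An immutable, borrowed view of a page.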
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct Page<'a> {
pub data: &'a [u8; PAGE_SIZE],
pub offset: u64,
}
impl CowPage {
pub fn as_page(&self) -> Page {
Page {
data: unsafe { &*(self.data as *const [u8; PAGE_SIZE]) },
offset: self.offset,
}
}
#[cfg(feature = "crc32")]
pub fn crc(&self, hasher: &crc32fast::Hasher) -> u32 {
let mut hasher = hasher.clone();
hasher.reset();
unsafe {
let x = [(*self.data) & 0xfe];
hasher.update(&x[..]);
hasher.update(core::slice::from_raw_parts(self.data.add(1), 3));
hasher.update(core::slice::from_raw_parts(self.data.add(8), PAGE_SIZE - 8));
}
hasher.finalize()
}
#[cfg(feature = "crc32")]
pub fn crc_check(&self, hasher: &crc32fast::Hasher) -> bool {
let crc = unsafe { u32::from_le(*(self.data as *const u32).add(1)) };
self.crc(hasher) == crc
}
}
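/// A page that the current transaction is allowed to mutate.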
#[derive(Debug)]
pub struct MutPage(pub CowPage);
#[cfg(not(feature = "crc32"))]
pub fn clear_dirty(data: &mut [u8]) {
data[0] &= 0xfe
}
#[cfg(feature = "crc32")]
pub fn clear_dirty(data: &mut [u8], hasher: &crc32fast::Hasher) {
unsafe {
data[0] &= 0xfe;
let crc = (self.0.data.as_mut_ptr() as *mut u32).add(1);
*crc = self.0.crc(hasher)
}
}
#[cfg(not(feature = "crc32"))]
pub unsafe fn clear_dirty_(data: *mut u8) {
*data &= 0xfe
}
#[cfg(feature = "crc32")]
pub unsafe fn clear_dirty_(data: *mut u8, hasher: &crc32fast::Hasher) {
*data &= 0xfe;
let crc = (data as *mut u32).add(1);
*crc = self.0.crc(hasher)
}
impl MutPage {
    /// Clear this page's dirty bit.
    #[cfg(not(feature = "crc32"))]
    pub fn clear_dirty(&mut self) {
        unsafe { *self.0.data &= 0xfe }
    }
    /// Clear this page's dirty bit and update the page's CRC.
    #[cfg(feature = "crc32")]
    pub fn clear_dirty(&mut self, hasher: &crc32fast::Hasher) {
        unsafe {
            *self.0.data &= 0xfe;
            let crc = (self.0.data as *mut u32).add(1);
            *crc = self.0.crc(hasher)
        }
    }
}
unsafe impl Sync for CowPage {}
unsafe impl Send for CowPage {}
impl CowPage {
    /// Whether this page's dirty bit is set, i.e. whether the page
    /// must be written to storage before the current transaction
    /// commits.
    pub fn is_dirty(&self) -> bool {
        unsafe { (*self.data) & 1 != 0 }
    }
}
/// Loading pages from storage.
#[async_trait]
pub trait LoadPage: Send + Sync {
    /// Errors that can occur while loading pages.
    type Error: core::fmt::Debug + Send;
    /// Load the page at offset `off` in the file.
    async unsafe fn load_page(&self, off: u64) -> Result<CowPage, Self::Error>;
    /// Load a contiguous run of pages covering `_length` bytes,
    /// starting at offset `_off`.
    async unsafe fn load_page_contiguous(
        &self,
        _off: u64,
        _length: u64,
    ) -> Result<CowPage, Self::Error> {
        unimplemented!()
    }
    /// Reference count of the page at offset `_off`. The default
    /// returns 0, meaning the page is not shared.
    async fn rc(&self, _off: u64) -> Result<u64, Self::Error> {
        Ok(0)
    }
}
/// Allocating and freeing pages, in addition to loading them.
#[async_trait]
pub trait AllocPage: LoadPage {
    /// Allocate a single page, with its dirty bit set.
    async unsafe fn alloc_page(&mut self) -> Result<MutPage, Self::Error>;
    /// Allocate a single page, without setting its dirty bit.
    async unsafe fn alloc_page_nodirty(&mut self) -> Result<MutPage, Self::Error> {
        unimplemented!()
    }
    /// Allocate a contiguous run of pages covering `length` bytes.
    async unsafe fn alloc_contiguous(&mut self, length: u64) -> Result<MutPage, Self::Error>;
    /// Increment the reference count of the page at offset `off`.
    async fn incr_rc(&mut self, off: u64) -> Result<usize, Self::Error>;
    /// Decrement the reference count of the page at offset `off`,
    /// freeing the page if the count drops to 0.
    async unsafe fn decr_rc(&mut self, off: u64) -> Result<usize, Self::Error>;
    /// Same as [`AllocPage::decr_rc`], but for pages allocated by the
    /// current transaction.
    async unsafe fn decr_rc_owned(&mut self, off: u64) -> Result<usize, Self::Error>;
}
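// A minimal in-memory implementation of `LoadPage` and `AllocPage`,
// compiled only for tests. This is a sketch for illustration, not part
// of the crate's API: the `Mem` type is our own, offsets are simply
// indices multiplied by `PAGE_SIZE`, pages are never freed, and
// reference counting is a no-op.
#[cfg(all(test, feature = "std"))]
mod in_memory {
    use super::*;
    use std::{boxed::Box, vec::Vec};

    pub struct Mem {
        pages: Vec<Box<[u8; PAGE_SIZE]>>,
    }

    impl Mem {
        pub fn new() -> Self {
            Mem { pages: Vec::new() }
        }
    }

    #[async_trait::async_trait]
    impl LoadPage for Mem {
        type Error = core::convert::Infallible;
        async unsafe fn load_page(&self, off: u64) -> Result<CowPage, Self::Error> {
            // `off` must be the offset of a previously allocated page.
            let page = &self.pages[off as usize / PAGE_SIZE];
            Ok(CowPage {
                data: page.as_ptr() as *mut u8,
                offset: off,
            })
        }
    }

    #[async_trait::async_trait]
    impl AllocPage for Mem {
        async unsafe fn alloc_page(&mut self) -> Result<MutPage, Self::Error> {
            let off = (self.pages.len() * PAGE_SIZE) as u64;
            self.pages.push(Box::new([0; PAGE_SIZE]));
            let data = self.pages.last_mut().unwrap().as_mut_ptr();
            // Freshly allocated pages start with the dirty bit set.
            *data |= 1;
            Ok(MutPage(CowPage { data, offset: off }))
        }
        async unsafe fn alloc_contiguous(&mut self, _length: u64) -> Result<MutPage, Self::Error> {
            unimplemented!()
        }
        // Pages are never shared in this sketch, so reference counts
        // stay at 0 and nothing is ever freed.
        async fn incr_rc(&mut self, _off: u64) -> Result<usize, Self::Error> {
            Ok(0)
        }
        async unsafe fn decr_rc(&mut self, _off: u64) -> Result<usize, Self::Error> {
            Ok(0)
        }
        async unsafe fn decr_rc_owned(&mut self, _off: u64) -> Result<usize, Self::Error> {
            Ok(0)
        }
    }
}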