Commit 06eca4f3 authored by Éloïs

ref(kv_typed): get_ref_slice: deser must be backend agnostic

parent 4a0a0b86
Pipeline #12712 passed
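
The idea behind this refactoring: each backend's `get_ref_slice` used to perform the zerocopy deserialization itself (turning the stored bytes into `&[V::Elem]`), so the same `LayoutVerified` boilerplate was duplicated in the LevelDB, LMDB, memory and Sled implementations. After this commit, every backend only hands the raw `&[u8]` value to the closure, and the deserialization happens once, in the generic collection layer. Below is a minimal, self-contained sketch of that pattern, not the real kv_typed code: `FakeBackend` and `get_u64_slice` are hypothetical stand-ins, the `ValueSliceZc` prefix handling is omitted for brevity, and it assumes a zerocopy version exposing `LayoutVerified` (the API used in this diff).

    use std::collections::HashMap;

    type KvResult<T> = Result<T, String>;

    // Stand-in for a backend column: it only knows about raw bytes.
    struct FakeBackend(HashMap<Vec<u8>, Vec<u8>>);

    impl FakeBackend {
        // Backend-agnostic accessor: the closure always receives `&[u8]`.
        fn get_ref_slice<D, F: Fn(&[u8]) -> KvResult<D>>(
            &self,
            k: &[u8],
            f: F,
        ) -> KvResult<Option<D>> {
            self.0.get(k).map(|bytes| f(bytes)).transpose()
        }
    }

    // Generic layer: the zerocopy reinterpretation of the bytes into typed
    // elements happens here, once, instead of in every backend.
    fn get_u64_slice<D, F: Fn(&[u64]) -> KvResult<D>>(
        backend: &FakeBackend,
        k: &[u8],
        f: F,
    ) -> KvResult<Option<D>> {
        backend.get_ref_slice(k, |bytes| {
            if bytes.is_empty() {
                f(&[])
            } else if let Some(layout_verified) =
                zerocopy::LayoutVerified::<_, [u64]>::new_slice(bytes)
            {
                f(&layout_verified)
            } else {
                Err("Bytes are invalid length or alignment.".to_owned())
            }
        })
    }
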
@@ -49,7 +49,7 @@ pub trait BackendCol: 'static + Clone + Debug + Send + Sync {
         k: &K,
         f: F,
     ) -> KvResult<Option<D>>;
-    fn get_ref_slice<K: Key, V: ValueSliceZc, D, F: Fn(&[V::Elem]) -> KvResult<D>>(
+    fn get_ref_slice<K: Key, V: ValueSliceZc, D, F: Fn(&[u8]) -> KvResult<D>>(
         &self,
         k: &K,
         f: F,
@@ -162,7 +162,7 @@ impl BackendCol for LevelDbCol {
         })
     }
     #[inline(always)]
-    fn get_ref_slice<K: Key, V: ValueSliceZc, D, F: Fn(&[V::Elem]) -> KvResult<D>>(
+    fn get_ref_slice<K: Key, V: ValueSliceZc, D, F: Fn(&[u8]) -> KvResult<D>>(
         &self,
         k: &K,
         f: F,
@@ -170,21 +170,7 @@ impl BackendCol for LevelDbCol {
         k.as_bytes(|k_bytes| {
             self.0
                 .get(ReadOptions::new(), k_bytes)?
-                .map(|bytes| {
-                    if bytes.is_empty() {
-                        f(&[])
-                    } else if let Some(layout_verified) =
-                        zerocopy::LayoutVerified::<_, [V::Elem]>::new_slice(
-                            &bytes[V::prefix_len()..],
-                        )
-                    {
-                        f(&layout_verified)
-                    } else {
-                        Err(KvError::DeserError(
-                            "Bytes are invalid length or alignment.".into(),
-                        ))
-                    }
-                })
+                .map(|bytes| f(&bytes))
                 .transpose()
         })
     }
@@ -312,7 +312,7 @@ impl BackendCol for LmdbCol {
         })
     }
-    fn get_ref_slice<K: Key, V: ValueSliceZc, D, F: Fn(&[V::Elem]) -> KvResult<D>>(
+    fn get_ref_slice<K: Key, V: ValueSliceZc, D, F: Fn(&[u8]) -> KvResult<D>>(
         &self,
         k: &K,
         f: F,
@@ -323,21 +323,7 @@ impl BackendCol for LmdbCol {
             access
                 .get::<_, [u8]>(&self.inner.tree, k_bytes)
                 .to_opt()?
-                .map(|bytes| {
-                    if bytes.is_empty() {
-                        f(&[])
-                    } else if let Some(layout_verified) =
-                        zerocopy::LayoutVerified::<_, [V::Elem]>::new_slice(
-                            &bytes[V::prefix_len()..],
-                        )
-                    {
-                        f(&layout_verified)
-                    } else {
-                        Err(KvError::DeserError(
-                            "Bytes are invalid length or alignment.".into(),
-                        ))
-                    }
-                })
+                .map(|bytes| f(bytes))
                 .transpose()
         })
     }
@@ -129,31 +129,12 @@ impl BackendCol for MemCol {
         })
     }
     #[inline(always)]
-    fn get_ref_slice<K: Key, V: ValueSliceZc, D, F: Fn(&[V::Elem]) -> KvResult<D>>(
+    fn get_ref_slice<K: Key, V: ValueSliceZc, D, F: Fn(&[u8]) -> KvResult<D>>(
         &self,
         k: &K,
         f: F,
     ) -> KvResult<Option<D>> {
-        k.as_bytes(|k_bytes| {
-            self.tree
-                .get(k_bytes)
-                .map(|bytes| {
-                    if bytes.is_empty() {
-                        f(&[])
-                    } else if let Some(layout_verified) =
-                        zerocopy::LayoutVerified::<_, [V::Elem]>::new_slice(
-                            &bytes[V::prefix_len()..],
-                        )
-                    {
-                        f(&layout_verified)
-                    } else {
-                        Err(KvError::DeserError(
-                            "Bytes are invalid length or alignment.".into(),
-                        ))
-                    }
-                })
-                .transpose()
-        })
+        k.as_bytes(|k_bytes| self.tree.get(k_bytes).map(|bytes| f(bytes)).transpose())
     }
     #[inline(always)]
     fn delete<K: Key>(&mut self, k: &K) -> KvResult<()> {
@@ -113,27 +113,12 @@ impl BackendCol for MemCol {
         .transpose()
     }
     #[inline(always)]
-    fn get_ref_slice<K: Key, V: ValueSliceZc, D, F: Fn(&[V::Elem]) -> KvResult<D>>(
+    fn get_ref_slice<K: Key, V: ValueSliceZc, D, F: Fn(&[u8]) -> KvResult<D>>(
         &self,
         _k: &K,
         f: F,
     ) -> KvResult<Option<D>> {
-        self.0
-            .as_ref()
-            .map(|bytes| {
-                if bytes.is_empty() {
-                    f(&[])
-                } else if let Some(layout_verified) =
-                    zerocopy::LayoutVerified::<_, [V::Elem]>::new_slice(&bytes[V::prefix_len()..])
-                {
-                    f(&layout_verified)
-                } else {
-                    Err(KvError::DeserError(
-                        "Bytes are invalid length or alignment.".into(),
-                    ))
-                }
-            })
-            .transpose()
+        self.0.as_ref().map(|bytes| f(bytes)).transpose()
     }
     #[inline(always)]
     fn delete<K: Key>(&mut self, _k: &K) -> KvResult<()> {
@@ -112,31 +112,12 @@ impl BackendCol for SledCol {
         })
     }
     #[inline(always)]
-    fn get_ref_slice<K: Key, V: ValueSliceZc, D, F: Fn(&[V::Elem]) -> KvResult<D>>(
+    fn get_ref_slice<K: Key, V: ValueSliceZc, D, F: Fn(&[u8]) -> KvResult<D>>(
         &self,
         k: &K,
         f: F,
     ) -> KvResult<Option<D>> {
-        k.as_bytes(|k_bytes| {
-            self.0
-                .get(k_bytes)?
-                .map(|bytes| {
-                    if bytes.is_empty() {
-                        f(&[])
-                    } else if let Some(layout_verified) =
-                        zerocopy::LayoutVerified::<_, [V::Elem]>::new_slice(
-                            &bytes[V::prefix_len()..],
-                        )
-                    {
-                        f(&layout_verified)
-                    } else {
-                        Err(KvError::DeserError(
-                            "Bytes are invalid length or alignment.".into(),
-                        ))
-                    }
-                })
-                .transpose()
-        })
+        k.as_bytes(|k_bytes| self.0.get(k_bytes)?.map(|bytes| f(&bytes)).transpose())
     }
     #[inline(always)]
     fn delete<K: Key>(&mut self, k: &K) -> KvResult<()> {
@@ -192,7 +192,19 @@ impl<V: ValueSliceZc, BC: BackendCol, E: EventTrait<V = V>> DbCollectionRoGetRef
         f: F,
     ) -> KvResult<Option<D>> {
         let r = self.inner.read();
-        r.backend_col.get_ref_slice::<E::K, V, D, F>(k, f)
+        r.backend_col.get_ref_slice::<E::K, V, D, _>(k, |bytes| {
+            if bytes.is_empty() {
+                f(&[])
+            } else if let Some(layout_verified) =
+                zerocopy::LayoutVerified::<_, [V::Elem]>::new_slice(&bytes[V::prefix_len()..])
+            {
+                f(&layout_verified)
+            } else {
+                Err(KvError::DeserError(
+                    "Bytes are invalid length or alignment.".into(),
+                ))
+            }
+        })
     }
 }
@@ -137,7 +137,19 @@ impl<'tx, V: ValueSliceZc, BC: BackendCol, E: EventTrait<V = V>> TxColRo<'tx, BC
     ) -> KvResult<Option<D>> {
         self.col_reader
             .backend_col
-            .get_ref_slice::<E::K, V, D, F>(k, f)
+            .get_ref_slice::<E::K, V, D, _>(k, |bytes| {
+                if bytes.is_empty() {
+                    f(&[])
+                } else if let Some(layout_verified) =
+                    zerocopy::LayoutVerified::<_, [V::Elem]>::new_slice(&bytes[V::prefix_len()..])
+                {
+                    f(&layout_verified)
+                } else {
+                    Err(KvError::DeserError(
+                        "Bytes are invalid length or alignment.".into(),
+                    ))
+                }
+            })
     }
 }
@@ -61,7 +61,19 @@ impl<'tx, V: ValueSliceZc, BC: BackendCol, E: EventTrait<V = V>> TxColRw<'tx, BC
     ) -> KvResult<Option<D>> {
         self.col_reader
             .backend_col
-            .get_ref_slice::<E::K, V, D, F>(k, f)
+            .get_ref_slice::<E::K, V, D, _>(k, |bytes| {
+                if bytes.is_empty() {
+                    f(&[])
+                } else if let Some(layout_verified) =
+                    zerocopy::LayoutVerified::<_, [V::Elem]>::new_slice(&bytes[V::prefix_len()..])
+                {
+                    f(&layout_verified)
+                } else {
+                    Err(KvError::DeserError(
+                        "Bytes are invalid length or alignment.".into(),
+                    ))
+                }
+            })
     }
 }