Commit

Merge pull request #178 from hanabi1224/reduce_alloc_in_copy_to_overlay
Reduce alloc in copy_to_overlay
arkpar authored Feb 6, 2023
2 parents f8136c5 + 3d40344 commit b4af249
Showing 11 changed files with 136 additions and 73 deletions.
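
This commit replaces the owned `Vec<u8>` keys and values in the commit path with reference-counted handles (`RcKey`/`RcValue`), so copying a change set into the commit overlay clones a shared pointer instead of reallocating the byte buffers. The sketch below is a hypothetical stand-in for those types (the real definitions live in `src/db.rs` and are not part of this diff); the `value()` accessor and the `From<Vec<u8>>` conversion are assumed from the call sites shown further down.

```rust
use std::sync::Arc;

// Hypothetical stand-in for parity-db's RcValue/RcKey: the bytes are moved
// into an Arc once, and every later clone only bumps a reference count.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RcValue(Arc<Vec<u8>>);
pub type RcKey = RcValue;

impl RcValue {
	// Borrow the underlying bytes, mirroring the `key.value()` / `value.value()`
	// calls that appear throughout this diff.
	pub fn value(&self) -> &Vec<u8> {
		&self.0
	}
}

impl From<Vec<u8>> for RcValue {
	fn from(v: Vec<u8>) -> Self {
		// The Vec is moved, not copied; only the Arc bookkeeping is allocated.
		RcValue(Arc::new(v))
	}
}

fn main() {
	let value: RcValue = vec![0u8; 1024].into();
	let copy = value.clone(); // O(1): both handles share the same 1 KiB buffer
	assert_eq!(value.value().len(), copy.value().len());
	assert_eq!(Arc::strong_count(&value.0), 2);
}
```

With a change set typed as `Vec<Operation<RcKey, RcValue>>`, the overlay and the B-tree write path can hold the same buffers without duplicating the payload bytes, which is where the allocation saving comes from.
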
2 changes: 1 addition & 1 deletion Cargo.toml
@@ -26,7 +26,7 @@ snap = "1"
loom = { version = "0.5.1", optional = true }

[dev-dependencies]
env_logger = "0.9.0"
env_logger = "0.10.0"
fdlimit = "0.2.1"
rand = { version = "0.8.2", features = ["small_rng"] }
tempfile = "3.2"
16 changes: 8 additions & 8 deletions fuzz/src/lib.rs
@@ -226,7 +226,7 @@ pub trait DbSimulator {
};
Self::reset_model_from_database(&db.db, &mut layers, &old_layers);
},
Action::IterPrev =>
Action::IterPrev => {
if let Some(iter) = &mut db.iter {
let mut old_key = if let Some(old_key) = db.iter_current_key.take() {
old_key
@@ -250,15 +250,14 @@ pub trait DbSimulator {
old_key,
expected
);
assert!(expected.contains(&new_key_value), "Prev lookup on iterator with old position {:?}, expecting one of {:?}, found {:?}",
old_key,
expected, new_key_value);
assert!(expected.contains(&new_key_value), "{}", "Prev lookup on iterator with old position {old_key:?}, expecting one of {expected:?}, found {new_key_value:?}");
db.iter_current_key = Some(
new_key_value
.map_or(IterPosition::Start, |(k, _)| IterPosition::Value(k[0])),
);
},
Action::IterNext =>
}
},
Action::IterNext => {
if let Some(iter) = &mut db.iter {
let mut old_key = if let Some(old_key) = db.iter_current_key.take() {
old_key
@@ -282,12 +281,13 @@ pub trait DbSimulator {
old_key,
expected
);
assert!(expected.contains(&new_key_value), "Next lookup on iterator with old position {:?}, expecting one of {:?}, found {:?}", old_key, expected, new_key_value);
assert!(expected.contains(&new_key_value), "{}", "Next lookup on iterator with old position {old_key:?}, expecting one of {expected:?}, found {new_key_value:?}");
db.iter_current_key = Some(
new_key_value
.map_or(IterPosition::End, |(k, _)| IterPosition::Value(k[0])),
);
},
}
},
}
retry_operation(|| Self::check_db_and_model_are_equals(&db.db, &layers)).unwrap();
}
3 changes: 2 additions & 1 deletion src/btree/btree.rs
@@ -7,6 +7,7 @@ use super::*;
use crate::{
btree::BTreeTable,
column::Column,
db::{RcKey, RcValue},
error::Result,
log::{LogQuery, LogWriter},
table::key::TableKeyQuery,
@@ -35,7 +36,7 @@ impl BTree {

pub fn write_sorted_changes(
&mut self,
mut changes: &[Operation<Vec<u8>, Vec<u8>>],
mut changes: &[Operation<RcKey, RcValue>],
btree: TablesRef,
log: &mut LogWriter,
) -> Result<()> {
34 changes: 19 additions & 15 deletions src/btree/iter.rs
@@ -146,38 +146,39 @@ impl<'a> BTreeIterator<'a> {
self.next_backend(record_id, self.table, &*log, direction)?
};
let result = match (next_commit_overlay, next_backend) {
(Some((commit_key, commit_value)), Some((backend_key, backend_value))) =>
match (direction, commit_key.cmp(&backend_key)) {
(Some((commit_key, commit_value)), Some((backend_key, backend_value))) => {
match (direction, commit_key.value().cmp(&backend_key)) {
(IterDirection::Backward, std::cmp::Ordering::Greater) |
(IterDirection::Forward, std::cmp::Ordering::Less) => {
self.pending_backend = Some(PendingBackend {
next_item: Some((backend_key, backend_value)),
direction,
});
if let Some(value) = commit_value {
Some((commit_key, value))
Some((commit_key.value().clone(), value.value().clone()))
} else {
self.last_key = LastKey::At(commit_key);
self.last_key = LastKey::At(commit_key.value().clone());
continue
}
},
(IterDirection::Backward, std::cmp::Ordering::Less) |
(IterDirection::Forward, std::cmp::Ordering::Greater) => Some((backend_key, backend_value)),
(_, std::cmp::Ordering::Equal) =>
if let Some(value) = commit_value {
Some((backend_key, value))
Some((backend_key, value.value().clone()))
} else {
self.last_key = LastKey::At(commit_key);
self.last_key = LastKey::At(commit_key.value().clone());
continue
},
},
}
},
(Some((commit_key, Some(commit_value))), None) => {
self.pending_backend = Some(PendingBackend { next_item: None, direction });
Some((commit_key, commit_value))
Some((commit_key.value().clone(), commit_value.value().clone()))
},
(Some((k, None)), None) => {
self.pending_backend = Some(PendingBackend { next_item: None, direction });
self.last_key = LastKey::At(k);
self.last_key = LastKey::At(k.value().clone());
continue
},
(None, Some((backend_key, backend_value))) => Some((backend_key, backend_value)),
@@ -328,29 +329,32 @@ impl BTreeIterState {
(IterDirection::Forward, LastIndex::At(sep)) if is_leaf =>
LastIndex::At(*sep + 1),
(IterDirection::Forward, LastIndex::At(sep)) => LastIndex::Descend(*sep + 1),
(IterDirection::Forward, LastIndex::Before(sep)) if *sep == ORDER =>
(IterDirection::Forward, LastIndex::Before(sep)) if *sep == ORDER => {
if self.exit(direction) {
break
} else {
continue
},
}
},
(IterDirection::Forward, LastIndex::Before(sep)) => LastIndex::At(*sep),

(IterDirection::Backward, LastIndex::At(sep)) if is_leaf && *sep == 0 =>
(IterDirection::Backward, LastIndex::At(sep)) if is_leaf && *sep == 0 => {
if self.exit(direction) {
break
} else {
continue
},
}
},
(IterDirection::Backward, LastIndex::At(sep)) if is_leaf =>
LastIndex::At(*sep - 1),
(IterDirection::Backward, LastIndex::At(sep)) => LastIndex::Descend(*sep),
(IterDirection::Backward, LastIndex::Before(sep)) if *sep == 0 =>
(IterDirection::Backward, LastIndex::Before(sep)) if *sep == 0 => {
if self.exit(direction) {
break
} else {
continue
},
}
},
(IterDirection::Backward, LastIndex::Before(sep)) => LastIndex::At(*sep - 1),
};
match next {
18 changes: 11 additions & 7 deletions src/btree/mod.rs
@@ -357,24 +357,28 @@ pub mod commit_overlay {
use super::*;
use crate::{
column::{ColId, Column},
db::{BTreeCommitOverlay, Operation},
db::{BTreeCommitOverlay, Operation, RcKey, RcValue},
error::Result,
};

#[derive(Debug)]
pub struct BTreeChangeSet {
pub col: ColId,
pub changes: Vec<Operation<Vec<u8>, Vec<u8>>>,
pub changes: Vec<Operation<RcKey, RcValue>>,
}

impl BTreeChangeSet {
pub fn new(col: ColId) -> Self {
BTreeChangeSet { col, changes: Default::default() }
}

pub fn push(&mut self, change: Operation<Vec<u8>, Vec<u8>>) {
pub fn push(&mut self, change: Operation<Value, Value>) {
// No key hashing
self.changes.push(change);
self.changes.push(match change {
Operation::Set(k, v) => Operation::Set(k.into(), v.into()),
Operation::Dereference(k) => Operation::Dereference(k.into()),
Operation::Reference(k) => Operation::Reference(k.into()),
});
}

pub fn copy_to_overlay(
Expand All @@ -388,16 +392,16 @@ pub mod commit_overlay {
for change in self.changes.iter() {
match change {
Operation::Set(key, value) => {
*bytes += key.len();
*bytes += value.len();
*bytes += key.value().len();
*bytes += value.value().len();
overlay.insert(key.clone(), (record_id, Some(value.clone())));
},
Operation::Dereference(key) => {
// Don't add removed ref-counted values to overlay.
// (current ref_counted implementation does not
// make much sense for btree indexed content).
if !ref_counted {
*bytes += key.len();
*bytes += key.value().len();
overlay.insert(key.clone(), (record_id, None));
}
},
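
With the change set stored as `Operation<RcKey, RcValue>`, the `Operation::Set` arm of `copy_to_overlay` above still counts the full payload size, but its `key.clone()` and `value.clone()` are now reference-count bumps rather than fresh `Vec<u8>` allocations, and `push` performs the one-time `.into()` conversion from the caller's owned bytes. A minimal sketch of that arm, using plain `Arc<Vec<u8>>` aliases in place of the real `RcKey`/`RcValue` types:

```rust
use std::{collections::BTreeMap, sync::Arc};

// Illustrative aliases only; parity-db's RcKey/RcValue wrap the bytes similarly.
type RcKey = Arc<Vec<u8>>;
type RcValue = Arc<Vec<u8>>;
type RecordId = u64;
type Overlay = BTreeMap<RcKey, (RecordId, Option<RcValue>)>;

// Rough equivalent of the Operation::Set branch of copy_to_overlay after this
// change: the byte counter still sees the payload size, but the insert only
// clones two shared handles.
fn copy_set_to_overlay(
	overlay: &mut Overlay,
	record_id: RecordId,
	key: &RcKey,
	value: &RcValue,
	bytes: &mut usize,
) {
	*bytes += key.len();
	*bytes += value.len();
	overlay.insert(key.clone(), (record_id, Some(value.clone())));
}

fn main() {
	let mut overlay = Overlay::new();
	let mut bytes = 0;
	let key: RcKey = Arc::new(b"key".to_vec());
	let value: RcValue = Arc::new(vec![0u8; 4096]);
	copy_set_to_overlay(&mut overlay, 1, &key, &value, &mut bytes);
	assert_eq!(bytes, 3 + 4096);
	assert_eq!(Arc::strong_count(&value), 2); // buffer is shared, not copied
}
```
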
17 changes: 9 additions & 8 deletions src/btree/node.rs
@@ -9,6 +9,7 @@ use super::{
};
use crate::{
column::Column,
db::{RcKey, RcValue},
error::Result,
index::Address,
log::{LogQuery, LogWriter},
@@ -55,7 +56,7 @@ impl Node {
&mut self,
parent: Option<(&mut Self, usize)>,
depth: u32,
changes: &mut &[Operation<Vec<u8>, Vec<u8>>],
changes: &mut &[Operation<RcKey, RcValue>],
btree: TablesRef,
log: &mut LogWriter,
) -> Result<(Option<(Separator, Child)>, bool)> {
@@ -70,7 +71,7 @@
}
let r = match &changes[0] {
Operation::Set(key, value) =>
self.insert(depth, key, value, changes, btree, log)?,
self.insert(depth, key.value(), value.value(), changes, btree, log)?,
_ => self.on_existing(depth, changes, btree, log)?,
};
if r.0.is_some() || r.1 {
@@ -81,12 +82,12 @@
}
if let Some((parent, p)) = &parent {
let key = &changes[1].key();
let (at, i) = self.position(key)?; // TODO could start position from current
let (at, i) = self.position(key.value())?; // TODO could start position from current
if at || i < self.number_separator() {
*changes = &changes[1..];
continue
}
let (at, i) = parent.position(key)?;
let (at, i) = parent.position(key.value())?;
if !at && &i == p && i < parent.number_separator() {
*changes = &changes[1..];
continue
Expand All @@ -105,7 +106,7 @@ impl Node {
depth: u32,
key: &[u8],
value: &[u8],
changes: &mut &[Operation<Vec<u8>, Vec<u8>>],
changes: &mut &[Operation<RcKey, RcValue>],
btree: TablesRef,
log: &mut LogWriter,
) -> Result<(Option<(Separator, Child)>, bool)> {
@@ -235,18 +236,18 @@ impl Node {
fn on_existing(
&mut self,
depth: u32,
changes: &mut &[Operation<Vec<u8>, Vec<u8>>],
changes: &mut &[Operation<RcKey, RcValue>],
values: TablesRef,
log: &mut LogWriter,
) -> Result<(Option<(Separator, Child)>, bool)> {
let change = &changes[0];
let key = change.key();
let has_child = depth != 0;
let (at, i) = self.position(key)?;
let (at, i) = self.position(key.value())?;
if at {
let existing = self.separator_address(i);
if let Some(existing) = existing {
if Column::write_existing_value_plan::<_, Vec<u8>>(
if Column::write_existing_value_plan::<_, RcValue>(
&TableKey::NoHash,
values,
existing,
9 changes: 5 additions & 4 deletions src/column.rs
@@ -4,7 +4,7 @@
use crate::{
btree::BTreeTable,
compress::Compress,
db::{check::CheckDisplay, Operation},
db::{check::CheckDisplay, Operation, RcValue},
display::hex,
error::{Error, Result},
index::{Address, IndexTable, PlanOutcome, TableId as IndexTableId},
@@ -498,7 +498,7 @@ impl HashColumn {

pub fn write_plan(
&self,
change: &Operation<Key, Vec<u8>>,
change: &Operation<Key, RcValue>,
log: &mut LogWriter,
) -> Result<PlanOutcome> {
let tables = self.tables.upgradable_read();
@@ -509,7 +509,8 @@
} else {
match change {
Operation::Set(key, value) => {
let (r, _, _) = self.write_plan_new(tables, reindex, key, value, log)?;
let (r, _, _) =
self.write_plan_new(tables, reindex, key, value.as_ref(), log)?;
Ok(r)
},
Operation::Dereference(key) => {
@@ -534,7 +535,7 @@
fn write_plan_existing(
&self,
tables: &Tables,
change: &Operation<Key, Vec<u8>>,
change: &Operation<Key, RcValue>,
log: &mut LogWriter,
index: &IndexTable,
sub_index: usize,
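
On the hash-column side, `write_plan` now receives `Operation<Key, RcValue>` and borrows the payload with `value.as_ref()` when planning a new entry, which suggests the value type hands out `&[u8]` without giving up ownership. A small illustrative sketch of that borrow; the `AsRef<[u8]>` impl is an assumption based on this call site, not something shown in the diff:

```rust
use std::sync::Arc;

// Assumed shape: RcValue can lend its bytes as &[u8] while staying shared.
#[derive(Clone)]
struct RcValue(Arc<Vec<u8>>);

impl AsRef<[u8]> for RcValue {
	fn as_ref(&self) -> &[u8] {
		&self.0
	}
}

// A planning step that only reads the payload keeps taking &[u8] and stays
// unaware of how the caller shares ownership of the buffer.
fn plan_new_entry(value: &[u8]) -> usize {
	// e.g. decide how many value-table chunks this payload needs
	value.len()
}

fn main() {
	let value = RcValue(Arc::new(vec![7u8; 100]));
	assert_eq!(plan_new_entry(value.as_ref()), 100);
	// `value` still owns a share of the buffer here; nothing was copied.
	assert_eq!(value.as_ref().len(), 100);
}
```
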
(Diffs for the remaining 4 changed files are not shown.)
