
Commit

use re_tracing everywhere
teh-cmc committed May 31, 2023
1 parent 532f87b commit 7f60016
Showing 123 changed files with 351 additions and 586 deletions.
5 changes: 1 addition & 4 deletions crates/re_arrow_store/Cargo.toml
@@ -35,6 +35,7 @@ core_benchmarks_only = []
 re_format.workspace = true
 re_log_types.workspace = true
 re_log.workspace = true
+re_tracing.workspace = true
 
 # External dependencies:
 ahash.workspace = true
@@ -48,10 +49,6 @@ parking_lot.workspace = true
 smallvec.workspace = true
 thiserror.workspace = true
 
-# Native dependencies:
-[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
-puffin.workspace = true
-
 # Optional dependencies:
 polars-core = { workspace = true, optional = true, features = [
   "diagonal_concat",
22 changes: 0 additions & 22 deletions crates/re_arrow_store/src/lib.rs
@@ -56,25 +56,3 @@ pub use re_log_types::{TimeInt, TimeRange, TimeType, Timeline}; // for politenes
 pub mod external {
     pub use arrow2;
 }
-
-// ---
-
-/// Native-only profiling macro for puffin.
-#[doc(hidden)]
-#[macro_export]
-macro_rules! profile_function {
-    ($($arg: tt)*) => {
-        #[cfg(not(target_arch = "wasm32"))]
-        puffin::profile_function!($($arg)*);
-    };
-}
-
-/// Native-only profiling macro for puffin.
-#[doc(hidden)]
-#[macro_export]
-macro_rules! profile_scope {
-    ($($arg: tt)*) => {
-        #[cfg(not(target_arch = "wasm32"))]
-        puffin::profile_scope!($($arg)*);
-    };
-}
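
These two deleted macros are the heart of the commit: every crate in the workspace carried its own copy of this cfg-gated wrapper, and re_tracing now provides a single shared definition instead. The commit does not show re_tracing's source, but a minimal sketch of the centralized wrapper, assuming it hosts essentially the code removed above, would be:

/// Native-only profiling macro for puffin, defined once for the whole workspace.
#[macro_export]
macro_rules! profile_function {
    ($($arg: tt)*) => {
        // On wasm32 the cfg attribute strips the call entirely, so call
        // sites never need per-target attributes of their own.
        #[cfg(not(target_arch = "wasm32"))]
        puffin::profile_function!($($arg)*);
    };
}

Every call site below then changes mechanically from crate::profile_function!() to re_tracing::profile_function!().
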
2 changes: 1 addition & 1 deletion crates/re_arrow_store/src/store.rs
@@ -286,7 +286,7 @@ impl DataStore {
     ///
     /// Useful to call after a gc.
     pub fn oldest_time_per_timeline(&self) -> BTreeMap<Timeline, TimeInt> {
-        crate::profile_function!();
+        re_tracing::profile_function!();
 
         let mut oldest_time_per_timeline = BTreeMap::default();
 
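
This one-line swap is the shape of every remaining change: the first statement of an instrumented function switches from the crate-local macro to the shared one. A single statement can time the whole function because puffin scopes are RAII guards; conceptually (an illustrative stand-in, not puffin's actual implementation):

use std::time::Instant;

// Hypothetical guard type, for illustration only: it records a start time
// when created and reports the elapsed time when dropped at end of scope.
struct ScopeGuard {
    name: &'static str,
    start: Instant,
}

impl Drop for ScopeGuard {
    fn drop(&mut self) {
        eprintln!("{}: {:?}", self.name, self.start.elapsed());
    }
}

fn timed_function() {
    let _guard = ScopeGuard { name: "timed_function", start: Instant::now() };
    // ... the body runs here; the guard reports when the function returns ...
}
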
10 changes: 5 additions & 5 deletions crates/re_arrow_store/src/store_arrow.rs
@@ -22,7 +22,7 @@ impl IndexedBucket {
     /// - `$cluster_key`
     /// - rest of component columns in ascending lexical order
     pub fn serialize(&self) -> DataTableResult<(Schema, Chunk<Box<dyn Array>>)> {
-        crate::profile_function!();
+        re_tracing::profile_function!();
 
         let Self {
             timeline,
@@ -63,7 +63,7 @@ impl PersistentIndexedTable {
     /// - `$cluster_key`
     /// - rest of component columns in ascending lexical order
     pub fn serialize(&self) -> DataTableResult<(Schema, Chunk<Box<dyn Array>>)> {
-        crate::profile_function!();
+        re_tracing::profile_function!();
 
         let Self {
             ent_path: _,
@@ -95,7 +95,7 @@ fn serialize(
     col_num_instances: &[u32],
     table: &IntMap<ComponentName, DataCellColumn>,
 ) -> DataTableResult<(Schema, Chunk<Box<dyn Array>>)> {
-    crate::profile_function!();
+    re_tracing::profile_function!();
 
     let mut schema = Schema::default();
     let mut columns = Vec::new();
@@ -129,7 +129,7 @@ fn serialize_control_columns(
     col_row_id: &[RowId],
     col_num_instances: &[u32],
 ) -> DataTableResult<(Schema, Vec<Box<dyn Array>>)> {
-    crate::profile_function!();
+    re_tracing::profile_function!();
 
     let mut schema = Schema::default();
     let mut columns = Vec::new();
@@ -175,7 +175,7 @@ fn serialize_data_columns(
     cluster_key: &ComponentName,
     table: &IntMap<ComponentName, DataCellColumn>,
 ) -> DataTableResult<(Schema, Vec<Box<dyn Array>>)> {
-    crate::profile_function!();
+    re_tracing::profile_function!();
 
     let mut schema = Schema::default();
     let mut columns = Vec::new();
10 changes: 5 additions & 5 deletions crates/re_arrow_store/src/store_dump.rs
@@ -32,7 +32,7 @@ impl DataStore {
 
     fn dump_timeless_tables(&self) -> impl Iterator<Item = DataTable> + '_ {
         self.timeless_tables.values().map(|table| {
-            crate::profile_scope!("timeless_table");
+            re_tracing::profile_scope!("timeless_table");
 
             let PersistentIndexedTable {
                 ent_path,
@@ -58,10 +58,10 @@ impl DataStore {
 
     fn dump_temporal_tables(&self) -> impl Iterator<Item = DataTable> + '_ {
         self.tables.values().flat_map(|table| {
-            crate::profile_scope!("temporal_table");
+            re_tracing::profile_scope!("temporal_table");
 
             table.buckets.values().map(move |bucket| {
-                crate::profile_scope!("temporal_bucket");
+                re_tracing::profile_scope!("temporal_bucket");
 
                 bucket.sort_indices_if_needed();
 
@@ -105,14 +105,14 @@ impl DataStore {
         self.tables
             .values()
             .filter_map(move |table| {
-                crate::profile_scope!("temporal_table_filtered");
+                re_tracing::profile_scope!("temporal_table_filtered");
 
                 if table.timeline != timeline_filter {
                     return None;
                 }
 
                 Some(table.buckets.values().filter_map(move |bucket| {
-                    crate::profile_scope!("temporal_bucket_filtered");
+                    re_tracing::profile_scope!("temporal_bucket_filtered");
 
                     bucket.sort_indices_if_needed();
 
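
This file shows the two flavors side by side: profile_function! at function entry, and named profile_scope! calls inside closures, which have no function name to record. A short sketch of the same nesting pattern, reusing the scope names from the diff above (any crate that depends on re_tracing, as these do, can write this):

fn dump_tables_demo(tables: &[Vec<u64>]) {
    re_tracing::profile_function!();

    for table in tables {
        // Closure and loop bodies get explicit names.
        re_tracing::profile_scope!("temporal_table");

        for _bucket in table {
            // Nested scopes appear as children of "temporal_table" in the
            // resulting flamegraph.
            re_tracing::profile_scope!("temporal_bucket");
        }
    }
}
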
8 changes: 4 additions & 4 deletions crates/re_arrow_store/src/store_gc.rs
@@ -64,7 +64,7 @@ impl DataStore {
     //
     // TODO(#1823): Workload specific optimizations.
     pub fn gc(&mut self, target: GarbageCollectionTarget) -> (Vec<RowId>, DataStoreStats) {
-        crate::profile_function!();
+        re_tracing::profile_function!();
 
         self.gc_id += 1;
 
@@ -126,7 +126,7 @@ impl DataStore {
     ///
     /// Returns the list of `RowId`s that were purged from the store.
     fn gc_drop_at_least_num_bytes(&mut self, mut num_bytes_to_drop: f64) -> Vec<RowId> {
-        crate::profile_function!();
+        re_tracing::profile_function!();
 
         let mut row_ids = Vec::new();
 
@@ -165,7 +165,7 @@ impl IndexedTable {
     ///
     /// Returns how many bytes were actually dropped, or zero if the row wasn't found.
     fn try_drop_row(&mut self, row_id: RowId, time: i64) -> u64 {
-        crate::profile_function!();
+        re_tracing::profile_function!();
 
         let table_has_more_than_one_bucket = self.buckets.len() > 1;
 
@@ -211,7 +211,7 @@ impl IndexedBucketInner {
     ///
     /// Returns how many bytes were actually dropped, or zero if the row wasn't found.
     fn try_drop_row(&mut self, row_id: RowId, time: i64) -> u64 {
-        crate::profile_function!();
+        re_tracing::profile_function!();
 
         self.sort();
 
10 changes: 5 additions & 5 deletions crates/re_arrow_store/src/store_helpers.rs
@@ -24,7 +24,7 @@ impl DataStore {
     where
         for<'b> &'b C::ArrayType: IntoIterator,
     {
-        crate::profile_function!();
+        re_tracing::profile_function!();
 
         let (_, cells) = self.latest_at(query, entity_path, C::name(), &[C::name()])?;
         let cell = cells.get(0)?.as_ref()?;
@@ -57,7 +57,7 @@ impl DataStore {
     where
         for<'b> &'b C::ArrayType: IntoIterator,
     {
-        crate::profile_function!();
+        re_tracing::profile_function!();
 
         let mut cur_path = Some(entity_path.clone());
         while let Some(path) = cur_path {
@@ -84,7 +84,7 @@ impl DataStore {
     where
         for<'b> &'b C::ArrayType: IntoIterator,
     {
-        crate::profile_function!();
+        re_tracing::profile_function!();
 
         let query = LatestAtQuery::latest(Timeline::default());
         self.query_latest_component(entity_path, &query)
@@ -103,7 +103,7 @@ impl DataStore {
         timepoint: &TimePoint,
         component: C,
     ) {
-        crate::profile_function!();
+        re_tracing::profile_function!();
 
         let mut row = match DataRow::try_from_cells1(
             RowId::random(),
@@ -140,7 +140,7 @@ impl DataStore {
         timepoint: &TimePoint,
         component: ComponentName,
     ) {
-        crate::profile_function!();
+        re_tracing::profile_function!();
 
         if let Some(datatype) = self.lookup_datatype(&component) {
             let cell = DataCell::from_arrow_empty(component, datatype.clone());
14 changes: 7 additions & 7 deletions crates/re_arrow_store/src/store_polars.rs
@@ -27,7 +27,7 @@ impl DataStore {
     /// This cannot fail: it always tries to yield as much valuable information as it can, even in
     /// the face of errors.
     pub fn to_dataframe(&self) -> DataFrame {
-        crate::profile_function!();
+        re_tracing::profile_function!();
 
         const TIMELESS_COL: &str = "_is_timeless";
 
@@ -167,7 +167,7 @@ impl PersistentIndexedTable {
     /// This cannot fail: it always tries to yield as much valuable information as it can, even in
     /// the face of errors.
     pub fn to_dataframe(&self, store: &DataStore, config: &DataStoreConfig) -> DataFrame {
-        crate::profile_function!();
+        re_tracing::profile_function!();
 
         let Self {
             ent_path: _,
@@ -206,7 +206,7 @@ impl IndexedBucket {
     /// This cannot fail: it always tries to yield as much valuable information as it can, even in
     /// the face of errors.
     pub fn to_dataframe(&self, store: &DataStore, config: &DataStoreConfig) -> DataFrame {
-        crate::profile_function!();
+        re_tracing::profile_function!();
 
         let IndexedBucketInner {
             is_sorted: _,
@@ -260,7 +260,7 @@ impl IndexedBucket {
 // ---
 
 fn insert_ids_as_series(col_insert_id: &InsertIdVec) -> Series {
-    crate::profile_function!();
+    re_tracing::profile_function!();
 
     let insert_ids = arrow2::array::UInt64Array::from_slice(col_insert_id.as_slice());
     new_infallible_series(
@@ -277,7 +277,7 @@ fn column_as_series(
     component: ComponentName,
     cells: &[Option<DataCell>],
 ) -> Series {
-    crate::profile_function!();
+    re_tracing::profile_function!();
 
     // Computing the validity bitmap is just a matter of checking whether the data was
     // available in the component tables.
@@ -316,7 +316,7 @@ fn column_as_series(
 // ---
 
 fn new_infallible_series(name: &str, data: &dyn Array, len: usize) -> Series {
-    crate::profile_function!();
+    re_tracing::profile_function!();
 
     Series::try_from((name, data.as_ref().clean_for_polars())).unwrap_or_else(|_| {
         let errs = Utf8Array::<i32>::from(vec![Some("<ERR>"); len]);
@@ -335,7 +335,7 @@ fn sort_df_columns(
     store_insert_ids: bool,
     timelines: &BTreeSet<&str>,
 ) -> DataFrame {
-    crate::profile_function!();
+    re_tracing::profile_function!();
 
     let columns: Vec<_> = {
         let mut all = df.get_column_names();
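
All of these scopes are cheap no-ops until a profiler actually collects them. As a hedged usage sketch: set_scopes_on and GlobalProfiler::new_frame are puffin's standard APIs, but whether re_tracing re-exports them or wraps them in helpers of its own is not visible in this commit, so this calls puffin directly:

fn main() {
    // Scope macros record nothing until collection is switched on.
    puffin::set_scopes_on(true);

    {
        re_tracing::profile_scope!("frame");
        // ... per-frame work whose scopes get recorded ...
    }

    // Finish the frame so the profiler groups the scopes recorded above.
    puffin::GlobalProfiler::lock().new_frame();
}
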
(Diffs for the remaining 115 of the 123 changed files did not load.)
