
Commit

feat(derive): more stage metrics (#326)
refcell authored Jun 27, 2024
1 parent d3f1493 commit 961d4a5
Showing 4 changed files with 20 additions and 2 deletions.
2 changes: 1 addition & 1 deletion crates/derive/src/macros.rs
@@ -45,7 +45,7 @@ macro_rules! observe {
 
 /// Sets a metric value.
 #[macro_export]
-macro_rules! metrics_set {
+macro_rules! set {
     ($metric:ident, $value:expr) => {
         #[cfg(feature = "metrics")]
         $crate::metrics::$metric.set($value);
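The rename above shortens metrics_set! to set!. For context, a self-contained sketch of the feature-gated pattern this macro relies on, adapting the body shown above; the AtomicI64 gauge, module layout, and names are illustrative stand-ins, not crate code (the real crate registers prometheus gauges in metrics.rs):

use std::sync::atomic::Ordering;

pub mod metrics {
    use std::sync::atomic::AtomicI64;
    // Illustrative stand-in; the real crate registers prometheus gauges here.
    pub static DEMO_GAUGE: AtomicI64 = AtomicI64::new(0);
}

#[macro_export]
macro_rules! set {
    ($metric:ident, $value:expr) => {
        // With the `metrics` feature disabled, this statement is compiled out,
        // so call sites cost nothing in non-metrics builds.
        #[cfg(feature = "metrics")]
        $crate::metrics::$metric.store($value, std::sync::atomic::Ordering::Relaxed);
    };
}

fn main() {
    // Mirrors the call-site shape used in l1_traversal.rs below.
    set!(DEMO_GAUGE, 19_000_000_i64);
    println!("demo gauge = {}", metrics::DEMO_GAUGE.load(Ordering::Relaxed));
}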
13 changes: 13 additions & 0 deletions crates/derive/src/metrics.rs
@@ -20,6 +20,19 @@ lazy_static! {
         "Tracks the L1 origin for the L1 Traversal Stage"
     ).expect("Origin Gauge failed to register");
 
+    /// Tracks batch reader errors.
+    pub static ref BATCH_READER_ERRORS: CounterVec = register_counter_vec!(
+        "batch_reader_errors",
+        "Number of batch reader errors",
+        &["error"]
+    ).expect("Batch Reader Errors failed to register");
+
+    /// Tracks the compression ratio of batches.
+    pub static ref BATCH_COMPRESSION_RATIO: IntGauge = register_int_gauge!(
+        "batch_compression_ratio",
+        "Compression ratio of batches"
+    ).expect("Batch Compression Ratio failed to register");
+
     /// Tracks the number of provider method calls.
     pub static ref PROVIDER_CALLS: CounterVec = register_counter_vec!(
         "provider_calls",
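Both new metrics follow the registration pattern already used in this file. A minimal standalone sketch, assuming the prometheus and lazy_static crates (which the register_* macros above come from); the main() usage is illustrative, since in the crate the metrics are driven through the inc!/set! macros from macros.rs:

// Sketch only: register the two new metrics against the default registry
// and exercise them directly.
use lazy_static::lazy_static;
use prometheus::{register_counter_vec, register_int_gauge, CounterVec, IntGauge};

lazy_static! {
    pub static ref BATCH_READER_ERRORS: CounterVec = register_counter_vec!(
        "batch_reader_errors",
        "Number of batch reader errors",
        &["error"]
    ).expect("Batch Reader Errors failed to register");

    pub static ref BATCH_COMPRESSION_RATIO: IntGauge = register_int_gauge!(
        "batch_compression_ratio",
        "Compression ratio of batches"
    ).expect("Batch Compression Ratio failed to register");
}

fn main() {
    // Each distinct error string becomes its own labeled counter series.
    BATCH_READER_ERRORS.with_label_values(&["unsupported_compression_type"]).inc();
    // The gauge holds the latest ratio of compressed to decoded size, times 100.
    BATCH_COMPRESSION_RATIO.set(25);
}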
5 changes: 5 additions & 0 deletions crates/derive/src/stages/channel_reader.rs
@@ -161,12 +161,14 @@ impl BatchReader {
     pub(crate) fn next_batch(&mut self, cfg: &RollupConfig) -> Option<Batch> {
         // If the data is not already decompressed, decompress it.
         let mut brotli_used = false;
+        let mut raw_len = 0;
         if let Some(data) = self.data.take() {
             // Peek at the data to determine the compression type.
             if data.is_empty() {
                 warn!(target: "batch-reader", "Data is too short to determine compression type, skipping batch");
                 return None;
             }
+            raw_len = data.len();
             let compression_type = data[0];
             if (compression_type & 0x0F) == ZLIB_DEFLATE_COMPRESSION_METHOD ||
                 (compression_type & 0x0F) == ZLIB_RESERVED_COMPRESSION_METHOD
@@ -177,18 +179,21 @@ impl BatchReader {
                 self.decompressed = decompress_brotli(&data[1..]).ok()?;
             } else {
                 error!(target: "batch-reader", "Unsupported compression type: {:x}, skipping batch", compression_type);
+                crate::inc!(BATCH_READER_ERRORS, &["unsupported_compression_type"]);
                 return None;
             }
         }
 
         // Decompress and RLP decode the batch data, before finally decoding the batch itself.
         let decompressed_reader = &mut self.decompressed.as_slice()[self.cursor..].as_ref();
         let bytes = Bytes::decode(decompressed_reader).ok()?;
+        crate::set!(BATCH_COMPRESSION_RATIO, (raw_len as i64) * 100 / bytes.len() as i64);
         let batch = Batch::decode(&mut bytes.as_ref(), cfg).unwrap();
 
         // Confirm that brotli decompression was performed *after* the Fjord hardfork.
         if brotli_used && !cfg.is_fjord_active(batch.timestamp()) {
             warn!(target: "batch-reader", "Brotli compression used before Fjord hardfork, skipping batch");
+            crate::inc!(BATCH_READER_ERRORS, &["brotli_used_before_fjord"]);
             return None;
         }
 
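The new gauge records raw_len * 100 / bytes.len(): the compressed channel data size as an integer percentage of the decoded batch bytes. A standalone illustration of that arithmetic, with made-up values:

// Illustration only: raw_len is the compressed channel data length captured
// before decompression; decoded_len is the length of the RLP-decoded batch bytes.
fn compression_ratio(raw_len: usize, decoded_len: usize) -> i64 {
    // Integer percentage, matching `(raw_len as i64) * 100 / bytes.len() as i64`.
    // Like the original expression, this assumes decoded_len > 0.
    (raw_len as i64) * 100 / decoded_len as i64
}

fn main() {
    // A 1_000-byte compressed payload decoding to 4_000 bytes of batch data
    // compressed to 25% of its decoded size, so the gauge reads 25.
    assert_eq!(compression_ratio(1_000, 4_000), 25);
    println!("batch_compression_ratio = {}", compression_ratio(1_000, 4_000));
}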
2 changes: 1 addition & 1 deletion crates/derive/src/stages/l1_traversal.rs
@@ -104,7 +104,7 @@ impl<F: ChainProvider + Send> OriginAdvancer for L1Traversal<F> {
             return Err(StageError::SystemConfigUpdate(e));
         }
 
-        crate::metrics_set!(ORIGIN_GAUGE, next_l1_origin.number as i64);
+        crate::set!(ORIGIN_GAUGE, next_l1_origin.number as i64);
         self.block = Some(next_l1_origin);
         self.done = false;
         Ok(())
