cleanup, remove unused functions, clippy shenanigans
pront committed Oct 4, 2023
1 parent de5b38b commit f2a79ee
Showing 4 changed files with 33 additions and 60 deletions.
2 changes: 1 addition & 1 deletion lib/codecs/tests/native.rs
@@ -46,7 +46,6 @@ fn roundtrip_current_native_proto_fixtures() {

/// The event proto file was changed in v0.24. This test ensures we can still load the old version
/// binary and that when serialized and deserialized in the new format we still get the same event.
#[ignore]
#[test]
fn reserialize_pre_v24_native_json_fixtures() {
roundtrip_fixtures(
@@ -217,6 +216,7 @@ fn load_deserialize(path: &Path, deserializer: &dyn Deserializer) -> (Bytes, Eve
let mut buf = Vec::new();
file.read_to_end(&mut buf).unwrap();
let buf = Bytes::from(buf);

// Ensure that we can parse the json fixture successfully
let mut events = deserializer
.parse(buf.clone(), LogNamespace::Legacy)
7 changes: 5 additions & 2 deletions lib/codecs/tests/native_json.rs
@@ -1,5 +1,4 @@
use bytes::BytesMut;

use codecs::decoding::format::Deserializer;
use codecs::encoding::format::Serializer;
use codecs::{NativeJsonDeserializerConfig, NativeJsonSerializerConfig};
@@ -35,7 +34,11 @@ fn histogram_metric_roundtrip() {
MetricValue::AggregatedHistogram {
count: 1,
sum: 1.0,
buckets: buckets!(f64::NEG_INFINITY => 10 , f64::MIN => 10, 1.5 => 10, f64::MAX => 10, f64::INFINITY => 10),
buckets: buckets!(
f64::NEG_INFINITY => 10 ,
f64::MIN => 10, 1.5 => 10,
f64::MAX => 10,
f64::INFINITY => 10),
},
));

68 changes: 21 additions & 47 deletions lib/vector-core/src/event/metric/value.rs
@@ -1,8 +1,5 @@
use core::fmt;
use std::collections::BTreeSet;
use std::fmt::Debug;
use std::num::{ParseFloatError, ParseIntError};
use std::str::FromStr;

use serde::{Deserialize, Deserializer, Serialize, Serializer};

@@ -454,7 +451,7 @@ impl fmt::Display for MetricValue {
} => {
write!(fmt, "count={count} sum={sum} ")?;
write_list(fmt, " ", buckets, |fmt, bucket| {
fmt::Display::fmt(&bucket, fmt)
write!(fmt, "{}@{}", bucket.count, bucket.upper_limit)
})
}
MetricValue::AggregatedSummary {
@@ -603,39 +600,26 @@ impl ByteSizeOf for Sample {
}
}

/// A histogram bucket.
///
/// Histogram buckets represent the `count` of observations where the value of the observations does
/// not exceed the specified `upper_limit`.
#[configurable_component(no_deser, no_ser)]
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
pub struct Bucket {
/// The upper limit of values in the bucket.
#[serde(serialize_with = "serialize_f64", deserialize_with = "deserialize_f64")]
pub upper_limit: f64,

/// The number of values tracked in this bucket.
pub count: u64,
}

/// Custom serialization function which converts special `f64` values to strings.
/// Non-special values are serialized as numbers.
#[allow(clippy::trivially_copy_pass_by_ref)]
fn serialize_f64<S>(value: &f64, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
//println!("serializing {}", value);
if value.is_infinite() || value.is_nan() {
serializer.serialize_str(&format!("{}", value))
serializer.serialize_str(&format!("{value}"))
} else {
serializer.serialize_f64(*value)
}
}

/// Custom deserialization function for handling special f64 values.
fn deserialize_f64<'de, D>(deserializer: D) -> Result<f64, D::Error>
where
D: Deserializer<'de>,
{
let value: serde_json::Value = Deserialize::deserialize(deserializer)?;
//println!("deserializing {}", value);
match value {
serde_json::Value::Number(num) => num
.as_f64()
@@ -650,6 +634,21 @@ where
}
}

/// A histogram bucket.
///
/// Histogram buckets represent the `count` of observations where the value of the observations does
/// not exceed the specified `upper_limit`.
#[configurable_component(no_deser, no_ser)]
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
pub struct Bucket {
/// The upper limit of values in the bucket.
#[serde(serialize_with = "serialize_f64", deserialize_with = "deserialize_f64")]
pub upper_limit: f64,

/// The number of values tracked in this bucket.
pub count: u64,
}
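
Why the custom (de)serializers above: JSON has no literal for infinity or NaN, yet histogram bucket bounds routinely use values like `f64::INFINITY` (see the `native_json` test earlier in this commit), so non-finite values are written as strings while ordinary values stay numbers. Below is a minimal, self-contained sketch of the idea, not this crate's exact code; in particular the string branch of `deserialize_f64` is collapsed in the diff, so parsing it with `f64::from_str` here is an assumption.

use serde::{de::Error as _, Deserialize, Deserializer, Serialize, Serializer};

// Sketch: finite values are serialized as numbers; infinities and NaN become
// strings ("inf", "-inf", "NaN"), which JSON can represent.
fn serialize_f64<S: Serializer>(value: &f64, serializer: S) -> Result<S::Ok, S::Error> {
    if value.is_infinite() || value.is_nan() {
        serializer.serialize_str(&format!("{value}"))
    } else {
        serializer.serialize_f64(*value)
    }
}

// Sketch of the matching deserializer; the string arm is an assumption since
// it is not visible in the diff. Rust's float parser accepts "inf", "-inf",
// and "NaN", so a plain `parse::<f64>()` round-trips the strings above.
fn deserialize_f64<'de, D: Deserializer<'de>>(deserializer: D) -> Result<f64, D::Error> {
    match serde_json::Value::deserialize(deserializer)? {
        serde_json::Value::Number(num) => num
            .as_f64()
            .ok_or_else(|| D::Error::custom("not representable as f64")),
        serde_json::Value::String(s) => s.parse::<f64>().map_err(D::Error::custom),
        other => Err(D::Error::custom(format!("unexpected value: {other}"))),
    }
}

#[derive(Debug, Serialize, Deserialize)]
struct Bucket {
    #[serde(serialize_with = "serialize_f64", deserialize_with = "deserialize_f64")]
    upper_limit: f64,
    count: u64,
}

fn main() {
    let bucket = Bucket { upper_limit: f64::INFINITY, count: 10 };
    let json = serde_json::to_string(&bucket).unwrap();
    println!("{json}"); // {"upper_limit":"inf","count":10}

    let back: Bucket = serde_json::from_str(&json).unwrap();
    assert!(back.upper_limit.is_infinite() && back.count == 10);
}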

impl PartialEq for Bucket {
fn eq(&self, other: &Self) -> bool {
self.count == other.count && float_eq(self.upper_limit, other.upper_limit)
@@ -662,31 +661,6 @@ impl ByteSizeOf for Bucket {
}
}

impl fmt::Display for Bucket {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}@{}", self.count, self.upper_limit)
}
}

impl FromStr for Bucket {
type Err = vector_common::Error;

fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut parts = s.split('@');
let count: u64 = parts
.next()
.ok_or_else(|| "Missing count".to_string())?
.parse()
.map_err(|err: ParseIntError| err.to_string())?;
let upper_limit: f64 = parts
.next()
.ok_or_else(|| "Missing upper limit".to_string())?
.parse()
.map_err(|err: ParseFloatError| err.to_string())?;
Ok(Self { upper_limit, count })
}
}
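
Related cleanup: the standalone `Display` and `FromStr` impls for `Bucket` just above are removed (the `@`-separated parser was unused), and the `count@upper_limit` rendering now happens inline in `MetricValue`'s `Display` (see the `write!(fmt, "{}@{}", ...)` change earlier in this file). A minimal standalone sketch of the resulting text form, with `render_buckets` as a hypothetical stand-in for the crate's internal `write_list` helper:

// Minimal sketch (assumed names): each bucket prints as "count@upper_limit";
// MetricValue's Display prefixes the list with "count=<count> sum=<sum>".
struct Bucket {
    upper_limit: f64,
    count: u64,
}

// Hypothetical stand-in for the crate's internal `write_list` helper: joins
// the per-bucket strings with a single space.
fn render_buckets(buckets: &[Bucket]) -> String {
    buckets
        .iter()
        .map(|b| format!("{}@{}", b.count, b.upper_limit))
        .collect::<Vec<_>>()
        .join(" ")
}

fn main() {
    let buckets = [
        Bucket { upper_limit: 1.5, count: 10 },
        Bucket { upper_limit: f64::INFINITY, count: 10 },
    ];
    println!("{}", render_buckets(&buckets)); // 10@1.5 10@inf
}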

/// A single quantile observation.
///
/// Quantiles themselves are "cut points dividing the range of a probability distribution into
16 changes: 6 additions & 10 deletions src/transforms/metric_to_log.rs
@@ -1,16 +1,14 @@
use std::collections::{BTreeMap, BTreeSet};

use chrono::Utc;
use serde_json::Value;
use vrl::path::OwnedValuePath;
use vrl::value::kind::Collection;
use vrl::value::Kind;

use codecs::MetricTagValues;
use lookup::{event_path, owned_value_path, path, PathPrefix};
use serde_json::Value;
use std::collections::{BTreeMap, BTreeSet};
use vector_common::TimeZone;
use vector_config::configurable_component;
use vector_core::config::LogNamespace;
use vrl::path::OwnedValuePath;
use vrl::value::kind::Collection;
use vrl::value::Kind;

use crate::config::OutputId;
use crate::{
@@ -355,19 +353,17 @@ mod tests {
use similar_asserts::assert_eq;
use tokio::sync::mpsc;
use tokio_stream::wrappers::ReceiverStream;

use vector_common::config::ComponentKey;
use vector_core::{event::EventMetadata, metric_tags};

use super::*;
use crate::event::{
metric::{MetricKind, MetricTags, MetricValue, StatisticKind, TagValue, TagValueSet},
Metric, Value,
};
use crate::test_util::{components::assert_transform_compliance, random_string};
use crate::transforms::test::create_topology;

use super::*;

#[test]
fn generate_config() {
crate::test_util::test_generate_config::<MetricToLogConfig>();
