Commit
Merge pull request redpanda-data#24564 from IoannisRP/ik-pandaproxy-refactor

pandaproxy: add missing internal metrics
michael-redpanda authored Dec 17, 2024
2 parents ac58586 + 939face commit 0075d03
Showing 2 changed files with 96 additions and 70 deletions.
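
The change folds setup_public_metrics() into setup_metrics() and registers the 3xx/4xx/5xx request_errors_total counters with the internal "pandaproxy" metric group as well, where previously only the request-latency histogram was exposed internally. Below is a minimal sketch, not taken from the patch, of the underlying Seastar idiom the refactor relies on: the same counter value registered in both an internal and a public metric group, with the public series aggregated across shards. The class request_probe, the member _err_5xx, and the label value "hypothetical_op" are invented for illustration; only the seastar::metrics calls mirror the ones in the diff.

// Sketch only, not part of the patch.
#include <cstdint>
#include <seastar/core/metrics.hh>
#include <seastar/core/metrics_registration.hh>

namespace sm = seastar::metrics;

class request_probe {
public:
    void setup() {
        auto operation = sm::label("operation");
        auto status = sm::label("status");

        // Internal endpoint: per-shard series are kept.
        _internal.add_group(
          "pandaproxy",
          {sm::make_counter(
            "request_errors_total",
            [this] { return _err_5xx; },
            sm::description("Total number of server errors"),
            {operation("hypothetical_op"), status("5xx")})});

        // Public endpoint: the shard dimension is aggregated away.
        _public.add_group(
          "pandaproxy",
          {sm::make_counter(
             "request_errors_total",
             [this] { return _err_5xx; },
             sm::description("Total number of server errors"),
             {operation("hypothetical_op"), status("5xx")})
             .aggregate({sm::shard_label})});
    }

private:
    uint64_t _err_5xx{0};          // hypothetical counter value
    sm::metric_groups _internal;   // scraped by the internal endpoint
    sm::metric_groups _public;     // scraped by the public endpoint
};

In the actual patch the public label keys go through metrics::make_namespaced_label() and the counter values come from the probe's http_status_metric, as the diff below shows.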
165 changes: 96 additions & 69 deletions src/v/pandaproxy/probe.cc
@@ -16,6 +16,7 @@

#include <seastar/core/metrics.hh>
#include <seastar/core/metrics_registration.hh>
#include <seastar/util/bool_class.hh>

namespace pandaproxy {

@@ -26,82 +27,108 @@ probe::probe(
, _group_name(group_name)
, _metrics() {
setup_metrics();
setup_public_metrics();
}

void probe::setup_metrics() {
namespace sm = ss::metrics;

if (config::shard_local_cfg().disable_metrics()) {
return;
using is_internal = ss::bool_class<struct is_internal_tag>;

struct Labels {
sm::label_instance label;
std::vector<sm::label> agg;
sm::label status;
};
const auto make_labels = [this](const is_internal internal) -> Labels {
const auto make_label =
[](const ss::sstring& key, const is_internal internal) {
return internal ? sm::label(key)
: metrics::make_namespaced_label(key);
};
const auto operation_label = make_label("operation", internal);
const auto agg = internal ? std::vector<sm::label>{sm::shard_label}
: std::vector<sm::label>{
sm::shard_label, operation_label};
const auto status = make_label("status", internal);
return {
.label = operation_label(_path.operations.nickname),
.agg = agg,
.status = status};
};

const auto internal_labels = make_labels(is_internal::yes);
const auto public_labels = make_labels(is_internal::no);

const auto make_internal_request_latency = [this](const Labels& l) {
return sm::make_histogram(
"request_latency",
sm::description("Request latency"),
{l.label},
[this] {
return _request_metrics.hist().internal_histogram_logform();
});
};

const auto make_public_request_latency = [this](const Labels& l) {
return sm::make_histogram(
"request_latency_seconds",
sm::description(
ssx::sformat("Internal latency of request for {}", _group_name)),
{l.label},
[this] {
return _request_metrics.hist().public_histogram_logform();
});
};

const auto make_request_errors_total_5xx = [this](const Labels& l) {
return sm::make_counter(
"request_errors_total",
[this] { return _request_metrics._5xx_count; },
sm::description(
ssx::sformat("Total number of {} server errors", _group_name)),
{l.label, l.status("5xx")});
};

const auto make_request_errors_total_4xx = [this](const Labels& l) {
return sm::make_counter(
"request_errors_total",
[this] { return _request_metrics._4xx_count; },
sm::description(
ssx::sformat("Total number of {} client errors", _group_name)),
{l.label, l.status("4xx")});
};

const auto make_request_errors_total_3xx = [this](const Labels& l) {
return sm::make_counter(
"request_errors_total",
[this] { return _request_metrics._3xx_count; },
sm::description(
ssx::sformat("Total number of {} redirection errors", _group_name)),
{l.label, l.status("3xx")});
};

if (!config::shard_local_cfg().disable_metrics()) {
_metrics.add_group(
"pandaproxy",
{make_internal_request_latency(internal_labels),
make_request_errors_total_5xx(internal_labels),
make_request_errors_total_4xx(internal_labels),
make_request_errors_total_3xx(internal_labels)},
{},
internal_labels.agg);
}

auto operation_label = sm::label("operation");
std::vector<sm::label_instance> labels{
operation_label(_path.operations.nickname)};

_metrics.add_group(
"pandaproxy",
{sm::make_histogram(
"request_latency",
sm::description("Request latency"),
labels,
[this] {
return _request_metrics.hist().internal_histogram_logform();
})},
{},
{sm::shard_label});
}

void probe::setup_public_metrics() {
namespace sm = ss::metrics;

if (config::shard_local_cfg().disable_public_metrics()) {
return;
if (!config::shard_local_cfg().disable_public_metrics()) {
_public_metrics.add_group(
_group_name,
{make_public_request_latency(public_labels)
.aggregate(public_labels.agg),
make_request_errors_total_5xx(public_labels)
.aggregate(public_labels.agg),
make_request_errors_total_4xx(public_labels)
.aggregate(public_labels.agg),
make_request_errors_total_3xx(public_labels)
.aggregate(public_labels.agg)});
}

auto operation_label = metrics::make_namespaced_label("operation");
auto status_label = metrics::make_namespaced_label("status");

std::vector<sm::label_instance> labels{
operation_label(_path.operations.nickname)};

auto aggregate_labels = std::vector<sm::label>{
sm::shard_label, operation_label};

_public_metrics.add_group(
_group_name,
{sm::make_histogram(
"request_latency_seconds",
sm::description(
ssx::sformat("Internal latency of request for {}", _group_name)),
labels,
[this] { return _request_metrics.hist().public_histogram_logform(); })
.aggregate(aggregate_labels),

sm::make_counter(
"request_errors_total",
[this] { return _request_metrics._5xx_count; },
sm::description(
ssx::sformat("Total number of {} server errors", _group_name)),
{operation_label(_path.operations.nickname), status_label("5xx")})
.aggregate(aggregate_labels),

sm::make_counter(
"request_errors_total",
[this] { return _request_metrics._4xx_count; },
sm::description(
ssx::sformat("Total number of {} client errors", _group_name)),
{operation_label(_path.operations.nickname), status_label("4xx")})
.aggregate(aggregate_labels),

sm::make_counter(
"request_errors_total",
[this] { return _request_metrics._3xx_count; },
sm::description(
ssx::sformat("Total number of {} redirection errors", _group_name)),
{operation_label(_path.operations.nickname), status_label("3xx")})
.aggregate(aggregate_labels)});
}

} // namespace pandaproxy
1 change: 0 additions & 1 deletion src/v/pandaproxy/probe.h
@@ -72,7 +72,6 @@ class probe {

private:
void setup_metrics();
void setup_public_metrics();

private:
http_status_metric _request_metrics;