diff --git a/keras/src/metrics/probabilistic_metrics.py b/keras/src/metrics/probabilistic_metrics.py
index 1abcd55623f..2f719d84630 100644
--- a/keras/src/metrics/probabilistic_metrics.py
+++ b/keras/src/metrics/probabilistic_metrics.py
@@ -69,9 +69,7 @@ class Poisson(reduction_metrics.MeanMetricWrapper):
         name: (Optional) string name of the metric instance.
         dtype: (Optional) data type of the metric result.
 
-    Example:
-
-    Example:
+    Examples:
 
     >>> m = keras.metrics.Poisson()
     >>> m.update_state([[0, 1], [0, 0]], [[1, 1], [0, 0]])
@@ -119,9 +117,7 @@ class BinaryCrossentropy(reduction_metrics.MeanMetricWrapper):
             e.g. `label_smoothing=0.2` means that we will use a value of 0.1
             for label "0" and 0.9 for label "1".
 
-    Example:
-
-    Example:
+    Examples:
 
     >>> m = keras.metrics.BinaryCrossentropy()
     >>> m.update_state([[0, 1], [0, 0]], [[0.6, 0.4], [0.4, 0.6]])
@@ -195,9 +191,7 @@ class CategoricalCrossentropy(reduction_metrics.MeanMetricWrapper):
         axis: (Optional) Defaults to `-1`.
             The dimension along which entropy is computed.
 
-    Example:
-
-    Example:
+    Examples:
 
     >>> # EPSILON = 1e-7, y = y_true, y` = y_pred
     >>> # y` = clip_ops.clip_by_value(output, EPSILON, 1. - EPSILON)
@@ -282,9 +276,7 @@ class SparseCategoricalCrossentropy(reduction_metrics.MeanMetricWrapper):
         axis: (Optional) Defaults to `-1`.
             The dimension along which entropy is computed.
 
-    Example:
-
-    Example:
+    Examples:
 
     >>> # y_true = one_hot(y_true) = [[0, 1, 0], [0, 0, 1]]
     >>> # logits = log(y_pred)
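
For quick verification of the doctests now grouped under the consolidated `Examples:` heading, a minimal standalone sketch of the Poisson case above (assuming a working Keras 3 install; `result()` is the usual stateful-metric accessor and is not part of this patch):

    import keras  # assumes Keras 3 with any backend installed

    # Reproduce the Poisson doctest from the patched docstring:
    # update_state(y_true, y_pred) accumulates, result() reads the mean.
    m = keras.metrics.Poisson()
    m.update_state([[0, 1], [0, 0]], [[1, 1], [0, 0]])
    print(m.result())  # ~0.5: mean of y_pred - y_true * log(y_pred) over the batch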