From d1b6b77d5e8c6f925b407f3585ddfb17cb7702e1 Mon Sep 17 00:00:00 2001
From: Samuel Marks <807580+SamuelMarks@users.noreply.github.com>
Date: Wed, 12 Apr 2023 20:36:28 -0400
Subject: [PATCH 1/2]
 [keras/layers/activation/leaky_relu.py,keras/layers/activation/relu.py,keras/layers/activation/softmax.py]
 Standardise docstring usage of "Default to"

---
 keras/layers/activation/leaky_relu.py |  2 +-
 keras/layers/activation/relu.py       | 10 +++++-----
 keras/layers/activation/softmax.py    |  5 +++--
 3 files changed, 9 insertions(+), 8 deletions(-)

diff --git a/keras/layers/activation/leaky_relu.py b/keras/layers/activation/leaky_relu.py
index 4e3217d5d5b..bc82ed5edc4 100644
--- a/keras/layers/activation/leaky_relu.py
+++ b/keras/layers/activation/leaky_relu.py
@@ -54,7 +54,7 @@ class LeakyReLU(Layer):
       Same shape as the input.
 
     Args:
-      alpha: Float >= 0. Negative slope coefficient. Default to 0.3.
+      alpha: Float >= 0. Negative slope coefficient. Defaults to `0.3`.
 
     """
 
diff --git a/keras/layers/activation/relu.py b/keras/layers/activation/relu.py
index a63e368cba5..58bb09d113b 100644
--- a/keras/layers/activation/relu.py
+++ b/keras/layers/activation/relu.py
@@ -65,11 +65,11 @@ class ReLU(Layer):
       Same shape as the input.
 
     Args:
-      max_value: Float >= 0. Maximum activation value. Default to None, which
-        means unlimited.
-      negative_slope: Float >= 0. Negative slope coefficient. Default to 0.
-      threshold: Float >= 0. Threshold value for thresholded activation. Default
-        to 0.
+      max_value: Float >= 0. Maximum activation value. None means unlimited.
+        Defaults to `None`.
+      negative_slope: Float >= 0. Negative slope coefficient. Defaults to `0.`.
+      threshold: Float >= 0. Threshold value for thresholded activation.
+        Defaults to `0.`.
     """
 
     def __init__(
diff --git a/keras/layers/activation/softmax.py b/keras/layers/activation/softmax.py
index d1c0e04aca9..cc9e86e544a 100644
--- a/keras/layers/activation/softmax.py
+++ b/keras/layers/activation/softmax.py
@@ -72,8 +72,9 @@ class Softmax(Layer):
         normalization is applied.
     Call arguments:
       inputs: The inputs, or logits to the softmax layer.
-      mask: A boolean mask of the same shape as `inputs`. Defaults to `None`.
-        The mask specifies 1 to keep and 0 to mask.
+      mask: A boolean mask of the same shape as `inputs`. The mask
+        specifies 1 to keep and 0 to mask. Defaults to `None`.
+
     Returns:
       softmaxed output with the same shape as `inputs`.
 

From 46449dca70b6cdc164097e073c6b6359a8dba47d Mon Sep 17 00:00:00 2001
From: Samuel Marks <807580+SamuelMarks@users.noreply.github.com>
Date: Wed, 19 Apr 2023 22:43:03 -0400
Subject: [PATCH 2/2] [keras/layers/activation/relu.py] Use backticks for
 defaults in docstrings

---
 keras/layers/activation/relu.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/keras/layers/activation/relu.py b/keras/layers/activation/relu.py
index 58bb09d113b..0065bc4a98d 100644
--- a/keras/layers/activation/relu.py
+++ b/keras/layers/activation/relu.py
@@ -65,10 +65,10 @@ class ReLU(Layer):
       Same shape as the input.
 
     Args:
-      max_value: Float >= 0. Maximum activation value. None means unlimited.
+      max_value: Float >= `0.`. Maximum activation value. `None` means unlimited.
         Defaults to `None`.
-      negative_slope: Float >= 0. Negative slope coefficient. Defaults to `0.`.
-      threshold: Float >= 0. Threshold value for thresholded activation.
+      negative_slope: Float >= `0.`. Negative slope coefficient. Defaults to `0.`.
+      threshold: Float >= `0.`. Threshold value for thresholded activation.
        Defaults to `0.`.
     """
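
For context, the defaults these docstrings describe can be exercised directly. The following is a minimal illustrative sketch using the public `tf.keras` layer API; it is not part of the patch, and the printed values assume the documented defaults (alpha=0.3 for LeakyReLU; max_value=None, negative_slope=0, threshold=0 for ReLU; mask=None for Softmax):

    import tensorflow as tf

    # LeakyReLU: alpha (negative slope coefficient) defaults to 0.3.
    leaky = tf.keras.layers.LeakyReLU()   # same as LeakyReLU(alpha=0.3)

    # ReLU: max_value defaults to None (unlimited); negative_slope and
    # threshold both default to 0.
    relu = tf.keras.layers.ReLU()         # same as ReLU(max_value=None, negative_slope=0.0, threshold=0.0)

    # Softmax: the optional mask defaults to None; when a mask is given,
    # positions marked 0 are suppressed before normalisation.
    softmax = tf.keras.layers.Softmax()

    x = tf.constant([[-1.0, 0.0, 2.0]])
    print(leaky(x).numpy())    # [[-0.3  0.   2. ]]
    print(relu(x).numpy())     # [[0. 0. 2.]]
    print(softmax(x).numpy())  # rows sum to 1 along the last axis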