diff --git a/include/fdeep/layers/softmax_layer.hpp b/include/fdeep/layers/softmax_layer.hpp index c0565888..0a1b5476 100644 --- a/include/fdeep/layers/softmax_layer.hpp +++ b/include/fdeep/layers/softmax_layer.hpp @@ -49,8 +49,8 @@ class softmax_layer : public activation_layer const auto log_sum_shifted = std::log(sum_shifted); for (size_t z_class = 0; z_class < input.shape().depth_; ++z_class) { - output.set(0, 0, y, x, z_class, - std::exp(inp_shifted.get(0, 0, y, x, z_class) - log_sum_shifted)); + const auto result = std::exp(inp_shifted.get(0, 0, y, x, z_class) - log_sum_shifted); + output.set(0, 0, y, x, z_class, std::isinf(result) ? static_cast<float_type>(0) : result); } } }