diff --git a/pl_bolts/models/regression/logistic_regression.py b/pl_bolts/models/regression/logistic_regression.py
index 21178f5e17..f283c8182b 100644
--- a/pl_bolts/models/regression/logistic_regression.py
+++ b/pl_bolts/models/regression/logistic_regression.py
@@ -53,7 +53,7 @@ def training_step(self, batch, batch_idx):
         # flatten any input
         x = x.view(x.size(0), -1)
 
-        y_hat = self(x)
+        y_hat = self.linear(x)
 
         # PyTorch cross_entropy function combines log_softmax and nll_loss in single function
         loss = F.cross_entropy(y_hat, y, reduction='sum')