Skip to content

Commit

Permalink
Switched to StandardScaler from deprecated normalize in `NaturalGradient` (#8299)
Browse files Browse the repository at this point in the history

* switched to `StandardScaler`

* Don't mention private methods in reno

* Fix typo

Co-authored-by: Julien Gacon <gaconju@gmail.com>
Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com>
  • Loading branch information
3 people authored Jul 13, 2022
1 parent 400fadf commit 56501cc
Show file tree
Hide file tree
Showing 2 changed files with 18 additions and 6 deletions.
18 changes: 12 additions & 6 deletions qiskit/opflow/gradients/natural_gradient.py
Original file line number Diff line number Diff line change
Expand Up @@ -354,7 +354,7 @@ def _ridge(
lambda4: right starting point for L-curve corner search
tol_search: termination threshold for regularization parameter search
fit_intercept: if True calculate intercept
-        normalize: deprecated if fit_intercept=False, if True normalize A for regression
+        normalize: ignored if fit_intercept=False, if True normalize A for regression
copy_a: if True A is copied, else overwritten
max_iter: max. number of iterations if solver is CG
tol: precision of the regression solution
Expand All @@ -369,11 +369,11 @@ def _ridge(
"""
         from sklearn.linear_model import Ridge
+        from sklearn.preprocessing import StandardScaler

         reg = Ridge(
             alpha=lambda_,
             fit_intercept=fit_intercept,
-            normalize=normalize,
             copy_X=copy_a,
             max_iter=max_iter,
             tol=tol,
Expand All @@ -383,7 +383,10 @@ def _ridge(

         def reg_method(a, c, alpha):
             reg.set_params(alpha=alpha)
-            reg.fit(a, c)
+            if normalize:
+                reg.fit(StandardScaler().fit_transform(a), c)
+            else:
+                reg.fit(a, c)
             return reg.coef_

lambda_mc, x_mc = NaturalGradient._reg_term_search(
Expand Down Expand Up @@ -425,7 +428,7 @@ def _lasso(
lambda4: right starting point for L-curve corner search
tol_search: termination threshold for regularization parameter search
fit_intercept: if True calculate intercept
-            normalize: deprecated if fit_intercept=False, if True normalize A for regression
+            normalize: ignored if fit_intercept=False, if True normalize A for regression
precompute: If True compute and use Gram matrix to speed up calculations.
Gram matrix can also be given explicitly
copy_a: if True A is copied, else overwritten
Expand All @@ -444,11 +447,11 @@ def _lasso(
"""
         from sklearn.linear_model import Lasso
+        from sklearn.preprocessing import StandardScaler

         reg = Lasso(
             alpha=lambda_,
             fit_intercept=fit_intercept,
-            normalize=normalize,
             precompute=precompute,
             copy_X=copy_a,
             max_iter=max_iter,
Expand All @@ -461,7 +464,10 @@ def _lasso(

         def reg_method(a, c, alpha):
             reg.set_params(alpha=alpha)
-            reg.fit(a, c)
+            if normalize:
+                reg.fit(StandardScaler().fit_transform(a), c)
+            else:
+                reg.fit(a, c)
             return reg.coef_

lambda_mc, x_mc = NaturalGradient._reg_term_search(
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
---
fixes:
- |
Fix deprecation warnings in :class:`.NaturalGradient`, which now uses the
:class:`~sklearn.preprocessing.StandardScaler` to scale the data
before fitting the model if the ``normalize`` parameter is set to ``True``.

0 comments on commit 56501cc

Please sign in to comment.