[ENH] pull in warning for dialect change from pandas-gbq.
* Add comment linking to pandas-gbq issue for change in default
  dialect.
* Add versionchanged to read_gbq dialect.
tswast committed Sep 17, 2018
1 parent 9e2039b commit cd18b7d
Showing 3 changed files with 38 additions and 5 deletions.
6 changes: 3 additions & 3 deletions doc/source/whatsnew/v0.24.0.txt
@@ -170,9 +170,9 @@ Other Enhancements
- :meth:`Series.droplevel` and :meth:`DataFrame.droplevel` are now implemented (:issue:`20342`)
- Added support for reading from Google Cloud Storage via the ``gcsfs`` library (:issue:`19454`)
- :func:`to_gbq` and :func:`read_gbq` signature and documentation updated to
reflect changes from the `Pandas-GBQ library version 0.5.0
<https://pandas-gbq.readthedocs.io/en/latest/changelog.html#changelog-0-5-0>`__.
(:issue:`21627`)
reflect changes from the `Pandas-GBQ library version 0.6.0
<https://pandas-gbq.readthedocs.io/en/latest/changelog.html#changelog-0-6-0>`__.
(:issue:`21627`, :issue:`22557`)
- New method :meth:`HDFStore.walk` will recursively walk the group hierarchy of an HDF5 file (:issue:`10932`)
- :func:`read_html` copies cell data across ``colspan`` and ``rowspan``, and it treats all-``th`` table rows as headers if ``header`` kwarg is not given and there is no ``thead`` (:issue:`17054`)
- :meth:`Series.nlargest`, :meth:`Series.nsmallest`, :meth:`DataFrame.nlargest`, and :meth:`DataFrame.nsmallest` now accept the value ``"all"`` for the ``keep`` argument. This keeps all ties for the nth largest/smallest value (:issue:`16818`)
19 changes: 18 additions & 1 deletion pandas/io/gbq.py
@@ -1,5 +1,7 @@
""" Google BigQuery support """

import warnings


def _try_import():
# since pandas is a dependency of pandas-gbq
@@ -23,7 +25,7 @@ def _try_import():

def read_gbq(query, project_id=None, index_col=None, col_order=None,
reauth=False, private_key=None, auth_local_webserver=False,
dialect='legacy', location=None, configuration=None,
dialect=None, location=None, configuration=None,
verbose=None):
"""
Load data from Google BigQuery.
@@ -65,6 +67,8 @@ def read_gbq(query, project_id=None, index_col=None, col_order=None,
*New in version 0.2.0 of pandas-gbq*.
dialect : str, default 'legacy'
Note: The default value is changing to 'standard' in a future version.
SQL syntax dialect to use. Value can be one of:
``'legacy'``
@@ -76,6 +80,8 @@ def read_gbq(query, project_id=None, index_col=None, col_order=None,
compliant with the SQL 2011 standard. For more information
see `BigQuery Standard SQL Reference
<https://cloud.google.com/bigquery/docs/reference/standard-sql/>`__.
.. versionchanged:: 0.24.0
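
To make the two dialects described above concrete, here is an illustrative pair of queries (not part of this commit; the public Shakespeare sample dataset is used only as an example table). The table reference syntax is the most visible difference:

    # Legacy SQL: square brackets and a colon between project and dataset.
    legacy_sql = ("SELECT word FROM [bigquery-public-data:samples.shakespeare] "
                  "LIMIT 10")

    # Standard SQL (SQL 2011 compliant): backticks and dots.
    standard_sql = ("SELECT word FROM `bigquery-public-data.samples.shakespeare` "
                    "LIMIT 10")
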
location : str, optional
Location where the query job should run. See the `BigQuery locations
documentation
@@ -108,6 +114,17 @@ def read_gbq(query, project_id=None, index_col=None, col_order=None,
pandas.DataFrame.to_gbq : Write a DataFrame to Google BigQuery.
"""
    pandas_gbq = _try_import()

    if dialect is None:
        dialect = "legacy"
        warnings.warn(
            'The default value for dialect is changing to "standard" in a '
            'future version of pandas-gbq. Pass in dialect="legacy" to '
            "disable this warning.",
            FutureWarning,
            stacklevel=2,
        )

    return pandas_gbq.read_gbq(
        query, project_id=project_id, index_col=index_col,
        col_order=col_order, reauth=reauth, verbose=verbose,
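
A minimal usage sketch of the behaviour added above (assuming pandas 0.24.0 with pandas-gbq installed; "my-project" is a placeholder project id, not from this commit): omitting dialect now emits a FutureWarning, while passing it explicitly keeps the call silent.

    import pandas as pd

    # Omitting ``dialect`` triggers the new FutureWarning about the upcoming
    # switch of the default from legacy to standard SQL.
    df = pd.read_gbq("SELECT 1", project_id="my-project")

    # Passing ``dialect`` explicitly (either value) avoids the warning.
    df = pd.read_gbq("SELECT 1", project_id="my-project", dialect="legacy")
    df = pd.read_gbq("SELECT 1", project_id="my-project", dialect="standard")
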
18 changes: 17 additions & 1 deletion pandas/tests/io/test_gbq.py
@@ -4,11 +4,17 @@
import platform
import os

try:
    from unittest import mock
except ImportError:
    mock = pytest.importorskip("mock")

import numpy as np
import pandas as pd
from pandas import compat, DataFrame

from pandas.compat import range
import pandas.util.testing as tm


pandas_gbq = pytest.importorskip('pandas_gbq')

@@ -93,6 +99,16 @@ def make_mixed_dataframe_v2(test_size):
index=range(test_size))


def test_read_gbq_without_dialect_warns_future_change(monkeypatch):
    # Default dialect is changing to standard SQL. See:
    # https://github.com/pydata/pandas-gbq/issues/195
    mock_read_gbq = mock.Mock()
    mock_read_gbq.return_value = DataFrame([[1.0]])
    monkeypatch.setattr(pandas_gbq, 'read_gbq', mock_read_gbq)
    with tm.assert_produces_warning(FutureWarning):
        pd.read_gbq("SELECT 1")


@pytest.mark.single
class TestToGBQIntegrationWithServiceAccountKeyPath(object):

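The new test above stubs out pandas_gbq.read_gbq with a mock so no network call or credentials are needed. A rough standalone sketch of the same check outside the pandas test harness (assumes pandas-gbq is installed; unittest.mock.patch stands in for the monkeypatch fixture):

    import warnings
    from unittest import mock

    import pandas as pd

    # Stub the underlying pandas-gbq call; the pandas wrapper still emits
    # the FutureWarning because ``dialect`` was not passed.
    with mock.patch("pandas_gbq.read_gbq", return_value=pd.DataFrame([[1.0]])):
        with warnings.catch_warnings(record=True) as caught:
            warnings.simplefilter("always")
            pd.read_gbq("SELECT 1")

    assert any(issubclass(w.category, FutureWarning) for w in caught)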
