Merge pull request #1737 from dhermes/re-enable-builtins-lint-check
Re-enabling redefined-builtin for Pylint.
dhermes committed Apr 22, 2016
2 parents fa39ef6 + aefdd22 commit f227fdc
Showing 8 changed files with 38 additions and 37 deletions.
31 changes: 17 additions & 14 deletions gcloud/bigtable/happybase/table.py
@@ -296,7 +296,7 @@ def cells(self, row, column, versions=None, timestamp=None,
             curr_cells, include_timestamp=include_timestamp)
 
     def scan(self, row_start=None, row_stop=None, row_prefix=None,
-             columns=None, filter=None, timestamp=None,
+             columns=None, timestamp=None,
              include_timestamp=False, limit=None, **kwargs):
         """Create a scanner for data in this table.
@@ -314,6 +314,15 @@ def scan(self, row_start=None, row_stop=None, row_prefix=None,
         omitted, a full table scan is done. Note that this usually results
         in severe performance problems.
 
+        The keyword argument ``filter`` is also supported (beyond column and
+        row range filters supported here). HappyBase / HBase users will have
+        used this as an HBase filter string. (See the `Thrift docs`_ for more
+        details on those filters.) However, Google Cloud Bigtable doesn't
+        support those filter strings so a
+        :class:`~gcloud.bigtable.row.RowFilter` should be used instead.
+
+        .. _Thrift docs: http://hbase.apache.org/0.94/book/thrift.html
+
         The arguments ``batch_size``, ``scan_batching`` and ``sorted_columns``
         are allowed (as keyword arguments) for compatibility with
         HappyBase. However, they will not be used in any way, and will cause a
@@ -348,13 +357,6 @@ def scan(self, row_start=None, row_stop=None, row_prefix=None,
         * an entire column family: ``fam`` or ``fam:``
         * a single column: ``fam:col``
 
-        :type filter: :class:`RowFilter <gcloud.bigtable.row.RowFilter>`
-        :param filter: (Optional) An additional filter (beyond column and
-                       row range filters supported here). HappyBase / HBase
-                       users will have used this as an HBase filter string. See
-                       http://hbase.apache.org/0.94/book/thrift.html
-                       for more details on those filters.
-
         :type timestamp: int
         :param timestamp: (Optional) Timestamp (in milliseconds since the
                           epoch). If specified, only cells returned before (or
@@ -376,6 +378,7 @@ def scan(self, row_start=None, row_stop=None, row_prefix=None,
                  :class:`TypeError <exceptions.TypeError>` if a string
                  ``filter`` is used.
         """
+        filter_ = kwargs.pop('filter', None)
         legacy_args = []
         for kw_name in ('batch_size', 'scan_batching', 'sorted_columns'):
             if kw_name in kwargs:
@@ -399,22 +402,22 @@ def scan(self, row_start=None, row_stop=None, row_prefix=None,
             row_stop = _string_successor(row_prefix)
 
         filters = []
-        if isinstance(filter, six.string_types):
+        if isinstance(filter_, six.string_types):
             raise TypeError('Specifying filters as a string is not supported '
                             'by Cloud Bigtable. Use a '
                             'gcloud.bigtable.row.RowFilter instead.')
-        elif filter is not None:
-            filters.append(filter)
+        elif filter_ is not None:
+            filters.append(filter_)
 
         if columns is not None:
             filters.append(_columns_filter_helper(columns))
         # versions == 1 since we only want the latest.
-        filter_ = _filter_chain_helper(versions=1, timestamp=timestamp,
-                                       filters=filters)
+        filter_chain = _filter_chain_helper(versions=1, timestamp=timestamp,
+                                            filters=filters)
 
         partial_rows_data = self._low_level_table.read_rows(
             start_key=row_start, end_key=row_stop,
-            limit=limit, filter_=filter_)
+            limit=limit, filter_=filter_chain)
 
         # Mutable copy of data.
         rows_dict = partial_rows_data.rows
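A hedged sketch of the resulting calling convention (the `table` instance, row keys, and the `RowKeyRegexFilter` class are illustrative assumptions, not part of this commit):

```python
from gcloud.bigtable.row import RowKeyRegexFilter  # assumed RowFilter subclass

# HBase-style filter strings are now rejected outright:
try:
    next(table.scan(filter="PrefixFilter('row-')"))
except TypeError:
    pass  # scan() raises: string filters aren't supported by Cloud Bigtable.

# A RowFilter instance still works; note that `filter` now arrives via
# **kwargs (popped as `filter_`) since it left the signature to stop
# shadowing the builtin:
row_filter = RowKeyRegexFilter(b'row-.*')
for row_key, data in table.scan(row_start=b'row-000', row_stop=b'row-999',
                                filter=row_filter):
    print(row_key, data)
```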
8 changes: 4 additions & 4 deletions gcloud/datastore/test_batch.py
@@ -298,14 +298,14 @@ def test_as_context_mgr_w_error(self):
 
 class _PathElementPB(object):
 
-    def __init__(self, id):
-        self.id = id
+    def __init__(self, id_):
+        self.id = id_
 
 
 class _KeyPB(object):
 
-    def __init__(self, id):
-        self.path = [_PathElementPB(id)]
+    def __init__(self, id_):
+        self.path = [_PathElementPB(id_)]
 
 
 class _Connection(object):
18 changes: 9 additions & 9 deletions gcloud/datastore/test_connection.py
@@ -22,11 +22,11 @@ def _getTargetClass(self):
 
         return Connection
 
-    def _make_key_pb(self, project, id=1234):
+    def _make_key_pb(self, project, id_=1234):
         from gcloud.datastore.key import Key
         path_args = ('Kind',)
-        if id is not None:
-            path_args += (id,)
+        if id_ is not None:
+            path_args += (id_,)
         return Key(*path_args, project=project).to_protobuf()
 
     def _make_query_pb(self, kind):
@@ -362,7 +362,7 @@ def test_lookup_multiple_keys_empty_response(self):
 
         PROJECT = 'PROJECT'
         key_pb1 = self._make_key_pb(PROJECT)
-        key_pb2 = self._make_key_pb(PROJECT, id=2345)
+        key_pb2 = self._make_key_pb(PROJECT, id_=2345)
         rsp_pb = datastore_pb2.LookupResponse()
         conn = self._makeOne()
         URI = '/'.join([
@@ -391,7 +391,7 @@ def test_lookup_multiple_keys_w_missing(self):
 
         PROJECT = 'PROJECT'
         key_pb1 = self._make_key_pb(PROJECT)
-        key_pb2 = self._make_key_pb(PROJECT, id=2345)
+        key_pb2 = self._make_key_pb(PROJECT, id_=2345)
         rsp_pb = datastore_pb2.LookupResponse()
         er_1 = rsp_pb.missing.add()
         er_1.entity.key.CopyFrom(key_pb1)
@@ -425,7 +425,7 @@ def test_lookup_multiple_keys_w_deferred(self):
 
         PROJECT = 'PROJECT'
         key_pb1 = self._make_key_pb(PROJECT)
-        key_pb2 = self._make_key_pb(PROJECT, id=2345)
+        key_pb2 = self._make_key_pb(PROJECT, id_=2345)
         rsp_pb = datastore_pb2.LookupResponse()
         rsp_pb.deferred.add().CopyFrom(key_pb1)
         rsp_pb.deferred.add().CopyFrom(key_pb2)
@@ -778,12 +778,12 @@ def test_allocate_ids_non_empty(self):
 
         PROJECT = 'PROJECT'
         before_key_pbs = [
-            self._make_key_pb(PROJECT, id=None),
-            self._make_key_pb(PROJECT, id=None),
+            self._make_key_pb(PROJECT, id_=None),
+            self._make_key_pb(PROJECT, id_=None),
         ]
         after_key_pbs = [
             self._make_key_pb(PROJECT),
-            self._make_key_pb(PROJECT, id=2345),
+            self._make_key_pb(PROJECT, id_=2345),
         ]
         rsp_pb = datastore_pb2.AllocateIdsResponse()
         rsp_pb.keys.add().CopyFrom(after_key_pbs[0])
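The test renames follow the PEP 8 convention for avoiding a builtin collision: append a single trailing underscore. A hypothetical sketch, not from the commit itself:

```python
# `id` is the builtin that returns an object's identity; binding it as a
# parameter hides the builtin for the whole function body.

def fetch(id):           # shadows builtins.id -> pylint: redefined-builtin
    return id(object())  # TypeError if `id` was passed as, say, an int

def fetch_fixed(id_):    # builtin `id()` remains reachable
    print(id(id_))       # prints the object's identity via the real builtin
    return id_
```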
2 changes: 1 addition & 1 deletion gcloud/exceptions.py
@@ -147,7 +147,7 @@ class InternalServerError(ServerError):
     code = 500
 
 
-class NotImplemented(ServerError):
+class MethodNotImplemented(ServerError):
     """Exception mapping a '501 Not Implemented' response."""
     code = 501
 
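The rename matters because `NotImplemented` is a builtin constant, returned by binary special methods to signal an unsupported operand; a class of the same name shadows it wherever it is imported. A minimal illustration (the `Num` class is hypothetical):

```python
class MethodNotImplemented(Exception):  # renamed: no shadowing
    """Maps a '501 Not Implemented' HTTP response."""
    code = 501

class Num(object):
    def __init__(self, value):
        self.value = value

    def __add__(self, other):
        if not isinstance(other, Num):
            return NotImplemented  # the builtin constant, unobscured
        return Num(self.value + other.value)
```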
5 changes: 3 additions & 2 deletions gcloud/search/document.py
@@ -204,8 +204,9 @@ def _parse_value_resource(resource):
             return TimestampValue(value)
         if 'geoValue' in resource:
             lat_long = resource['geoValue']
-            lat, long = [float(coord.strip()) for coord in lat_long.split(',')]
-            return GeoValue((lat, long))
+            latitude, longitude = [float(coord.strip())
+                                   for coord in lat_long.split(',')]
+            return GeoValue((latitude, longitude))
         raise ValueError("Unknown value type")
 
     def _parse_fields_resource(self, resource):
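Here the shadowed builtin is `long`, Python 2's arbitrary-precision integer type, which this codebase still supported. A brief hypothetical sketch of the collision:

```python
# Python 2 only: `long` is a builtin type constructor.
lat_long = "40.7128, -74.0060"  # hypothetical 'geoValue' payload

lat, long = [float(c.strip()) for c in lat_long.split(',')]  # shadows long
# long(10)  # would now raise TypeError: 'float' object is not callable

latitude, longitude = [float(c.strip()) for c in lat_long.split(',')]  # clean
```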
3 changes: 0 additions & 3 deletions scripts/pylintrc_default
@@ -68,8 +68,6 @@ load-plugins=pylint.extensions.check_docs
 # - maybe-no-member: bi-modal functions confuse pylint type inference.
 # - no-member: indirections in protobuf-generated code
 # - protected-access: helpers use '_foo' of classes from generated code.
-# - redefined-builtin: use of 'id', 'type', 'filter' args in API-bound funcs;
-#       use of 'NotImplemented' to map HTTP response code.
 # - similarities: 'Bucket' and 'Blob' define 'metageneration' and 'owner' with
 #       identical implementation but different docstrings.
 # - star-args: standard Python idioms for varargs:
@@ -93,7 +91,6 @@ disable =
     maybe-no-member,
     no-member,
     protected-access,
-    redefined-builtin,
     similarities,
     star-args,
     redefined-variable-type,
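With the blanket disable removed, any remaining intentional shadowing would need a targeted pragma instead of a project-wide exemption. A hypothetical example, not from this commit:

```python
def key_factory(kind, id):  # pylint: disable=redefined-builtin
    """Keeps `id` in the public signature for API compatibility."""
    return (kind, id)
```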
4 changes: 2 additions & 2 deletions system_tests/clear_datastore.py
@@ -18,7 +18,7 @@
 
 import os
 
-from six.moves import input
+import six
 
 from gcloud import datastore
 from gcloud.environment_vars import TESTS_PROJECT
@@ -99,7 +99,7 @@ def remove_all_entities(client=None):
     print_func('This command will remove all entities for '
                'the following kinds:')
     print_func('\n'.join(['- ' + val for val in ALL_KINDS]))
-    response = input('Is this OK [y/n]? ')
+    response = six.moves.input('Is this OK [y/n]? ')
     if response.lower() == 'y':
         remove_all_entities()
     else:
4 changes: 2 additions & 2 deletions system_tests/populate_datastore.py
@@ -19,7 +19,7 @@
 
 import os
 
-from six.moves import zip
+import six
 
 from gcloud import datastore
 from gcloud.environment_vars import TESTS_PROJECT
@@ -93,7 +93,7 @@ def add_characters(client=None):
     # Get a client that uses the test dataset.
     client = datastore.Client(project=os.getenv(TESTS_PROJECT))
     with client.transaction() as xact:
-        for key_path, character in zip(KEY_PATHS, CHARACTERS):
+        for key_path, character in six.moves.zip(KEY_PATHS, CHARACTERS):
             if key_path[-1] != character['name']:
                 raise ValueError(('Character and key don\'t agree',
                                   key_path, character))
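Both system-test scripts swap `from six.moves import <name>`, which rebinds a builtin's name at module scope and trips the re-enabled check, for plain attribute access on the `six` module. A minimal sketch of the two styles:

```python
import six

# Before: rebinds the module-level names `input` and `zip`,
# triggering redefined-builtin.
# from six.moves import input, zip

# After: attribute access leaves the builtin names untouched.
answer = six.moves.input('Is this OK [y/n]? ')
pairs = list(six.moves.zip([1, 2, 3], ['a', 'b', 'c']))
```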
