This repository has been archived by the owner on Sep 3, 2022. It is now read-only.

Modify async to async_ to avoid reserved keyword clash. #728

Merged · 5 commits · Mar 24, 2020
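Context for the rename: from Python 3.7 onward, `async` (and `await`) are reserved keywords, so any module that defines or imports a name literally spelled `async` no longer compiles. A minimal sketch of the clash and of the trailing-underscore convention this PR adopts (illustrative only, not code from the repository):

```python
# Illustrative sketch, not code from this repository.
#
# On Python 3.7+ `async` is a reserved keyword, so the old spelling is
# rejected at compile time:
#
#   class async(object):   # SyntaxError: invalid syntax
#       pass
#
# PEP 8's convention for identifiers that clash with keywords is a single
# trailing underscore, which is the rename this PR applies throughout:
class async_(object):
    pass

print(async_.__name__)  # 'async_'
```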
4 changes: 2 additions & 2 deletions datalab/utils/__init__.py
@@ -12,7 +12,7 @@

"""Google Cloud Platform library - Internal Helpers."""

-from ._async import async, async_function, async_method
+from ._async import async_, async_function, async_method
from ._gcp_job import GCPJob
from ._http import Http, RequestException
from ._iterator import Iterator
@@ -24,7 +24,7 @@
from ._utils import print_exception_with_last_stack, get_item, compare_datetimes, \
pick_unused_port, is_http_running_on, gcs_copy_file

-__all__ = ['async', 'async_function', 'async_method', 'GCPJob', 'Http', 'RequestException',
+__all__ = ['async_', 'async_function', 'async_method', 'GCPJob', 'Http', 'RequestException',
'Iterator', 'Job', 'JobError', 'JSONEncoder', 'LRUCache', 'LambdaJob', 'DataflowJob',
'print_exception_with_last_stack', 'get_item', 'compare_datetimes', 'pick_unused_port',
'is_http_running_on', 'gcs_copy_file']
12 changes: 6 additions & 6 deletions datalab/utils/_async.py
@@ -23,7 +23,7 @@
from future.utils import with_metaclass


-class async(with_metaclass(abc.ABCMeta, object)):
+class async_(with_metaclass(abc.ABCMeta, object)):
""" Base class for async_function/async_method. Creates a wrapped function/method that will
run the original function/method on a thread pool worker thread and return a Job instance
for monitoring the status of the thread.
@@ -55,27 +55,27 @@ def __call__(self, *args, **kwargs):
return _job.Job(future=self.executor.submit(self._call, *args, **kwargs))


-class async_function(async):
+class async_function(async_):
""" This decorator can be applied to any static function that makes blocking calls to create
a modified version that creates a Job and returns immediately; the original
method will be called on a thread pool worker thread.
"""

def _call(self, *args, **kwargs):
# Call the wrapped method.
-return self._function(*async._preprocess_args(*args), **async._preprocess_kwargs(**kwargs))
+return self._function(*async_._preprocess_args(*args), **async_._preprocess_kwargs(**kwargs))


-class async_method(async):
+class async_method(async_):
""" This decorator can be applied to any class instance method that makes blocking calls to create
a modified version that creates a Job and returns immediately; the original method will be
called on a thread pool worker thread.
"""

def _call(self, *args, **kwargs):
# Call the wrapped method.
-return self._function(self.obj, *async._preprocess_args(*args),
-    **async._preprocess_kwargs(**kwargs))
+return self._function(self.obj, *async_._preprocess_args(*args),
+    **async_._preprocess_kwargs(**kwargs))

def __get__(self, instance, owner):
# This is important for attribute inheritance and setting self.obj so it can be
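For readers unfamiliar with these helpers: the docstrings above describe decorators that run the wrapped callable on a thread-pool worker and hand back a Job immediately. A hedged usage sketch of the renamed API (the decorated bodies and the exact Job interface are assumptions, not part of this diff):

```python
# Usage sketch based on the docstrings above; the function bodies and the
# Job methods are illustrative assumptions, not taken from this diff.
from datalab.utils import async_function, async_method

@async_function
def build_report(source):
    # Blocking work; runs on a thread-pool worker thread.
    return 'report for %s' % source

class Exporter(object):
    @async_method
    def export(self, table):
        # Also runs on a worker thread; __get__ binds self for the wrapper.
        return 'exported %s' % table

job = build_report('sales')  # returns a Job immediately instead of blocking
```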
2 changes: 1 addition & 1 deletion datalab/utils/_lambda_job.py
@@ -30,7 +30,7 @@ def __init__(self, fn, job_id, *args, **kwargs):
job_id: an optional ID for the job. If None, a UUID will be generated.
"""
super(LambdaJob, self).__init__(job_id)
-self._future = _async.async.executor.submit(fn, *args, **kwargs)
+self._future = _async.async_.executor.submit(fn, *args, **kwargs)

def __repr__(self):
"""Returns a representation for the job for showing in the notebook.
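The LambdaJob change matters because the thread pool is a class-level attribute on the renamed `async_` base class, shared by the decorator wrappers and by LambdaJob, so this call site has to follow the rename too. A small sketch of that shared-executor pattern (the pool size is an assumption; the real attribute is defined in `_async.py`):

```python
# Sketch of the class-level executor pattern LambdaJob relies on; the
# max_workers value is an assumption, not taken from this diff.
from concurrent import futures

class async_(object):
    # One process-wide pool, shared by async_function/async_method wrappers
    # and by LambdaJob via async_.executor.submit(...).
    executor = futures.ThreadPoolExecutor(max_workers=50)

future = async_.executor.submit(lambda x: x * 2, 21)
print(future.result())  # 42
```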
4 changes: 2 additions & 2 deletions google/datalab/utils/__init__.py
@@ -12,7 +12,7 @@

"""Google Cloud Platform library - Internal Helpers."""

-from ._async import async, async_function, async_method
+from ._async import async_, async_function, async_method
from ._http import Http, RequestException
from ._iterator import Iterator
from ._json_encoder import JSONEncoder
@@ -23,7 +23,7 @@
pick_unused_port, is_http_running_on, gcs_copy_file, python_portable_string


-__all__ = ['async', 'async_function', 'async_method', 'Http', 'RequestException', 'Iterator',
+__all__ = ['async_', 'async_function', 'async_method', 'Http', 'RequestException', 'Iterator',
'JSONEncoder', 'LRUCache', 'LambdaJob', 'DataflowJob',
'print_exception_with_last_stack', 'get_item', 'compare_datetimes', 'pick_unused_port',
'is_http_running_on', 'gcs_copy_file', 'python_portable_string']
12 changes: 6 additions & 6 deletions google/datalab/utils/_async.py
@@ -23,7 +23,7 @@
from future.utils import with_metaclass


-class async(with_metaclass(abc.ABCMeta, object)):
+class async_(with_metaclass(abc.ABCMeta, object)):
""" Base class for async_function/async_method. Creates a wrapped function/method that will
run the original function/method on a thread pool worker thread and return a Job instance
for monitoring the status of the thread.
@@ -55,27 +55,27 @@ def __call__(self, *args, **kwargs):
return Job(future=self.executor.submit(self._call, *args, **kwargs))


-class async_function(async):
+class async_function(async_):
""" This decorator can be applied to any static function that makes blocking calls to create
a modified version that creates a Job and returns immediately; the original
method will be called on a thread pool worker thread.
"""

def _call(self, *args, **kwargs):
# Call the wrapped method.
-return self._function(*async._preprocess_args(*args), **async._preprocess_kwargs(**kwargs))
+return self._function(*async_._preprocess_args(*args), **async_._preprocess_kwargs(**kwargs))


-class async_method(async):
+class async_method(async_):
""" This decorator can be applied to any class instance method that makes blocking calls to create
a modified version that creates a Job and returns immediately; the original method will be
called on a thread pool worker thread.
"""

def _call(self, *args, **kwargs):
# Call the wrapped method.
-return self._function(self.obj, *async._preprocess_args(*args),
-    **async._preprocess_kwargs(**kwargs))
+return self._function(self.obj, *async_._preprocess_args(*args),
+    **async_._preprocess_kwargs(**kwargs))

def __get__(self, instance, owner):
# This is important for attribute inheritance and setting self.obj so it can be
2 changes: 1 addition & 1 deletion google/datalab/utils/_lambda_job.py
@@ -30,7 +30,7 @@ def __init__(self, fn, job_id, *args, **kwargs):
job_id: an optional ID for the job. If None, a UUID will be generated.
"""
super(LambdaJob, self).__init__(job_id)
-self._future = _async.async.executor.submit(fn, *args, **kwargs)
+self._future = _async.async_.executor.submit(fn, *args, **kwargs)

def __repr__(self):
"""Returns a representation for the job for showing in the notebook.
6 changes: 3 additions & 3 deletions tox.ini
@@ -1,7 +1,7 @@
[tox]
-# By default, we want to run tests for Python 2.7, Python 3.5, and run our
-# flake8 checks.
-envlist = py27,py35,flake8,coveralls
+# By default, we want to run tests for Python 2.7, Python 3.5, Python 3.7,
+# and run our flake8 checks.
+envlist = py27,py35,py37,flake8,coveralls
# If an interpreter is missing locally, skip it.
skip_missing_interpreters = true
