From 9be17dec627f23002cd2a9ab55172c9a43d3ab11 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 8 Apr 2015 15:04:00 -0700 Subject: [PATCH] Renaming storage.get_all_buckets to storage.list_buckets. Also adding support for optional query parameters of storage.buckets.list. --- docs/_components/storage-getting-started.rst | 2 +- docs/_components/storage-quickstart.rst | 2 +- gcloud/storage/__init__.py | 2 +- gcloud/storage/api.py | 50 +++++++++-- gcloud/storage/demo/__init__.py | 7 +- gcloud/storage/demo/demo.py | 4 +- gcloud/storage/test_api.py | 95 ++++++++++++++++---- regression/storage.py | 2 +- 8 files changed, 133 insertions(+), 31 deletions(-) diff --git a/docs/_components/storage-getting-started.rst b/docs/_components/storage-getting-started.rst index ea92b4111611..ed0bec69ea5d 100644 --- a/docs/_components/storage-getting-started.rst +++ b/docs/_components/storage-getting-started.rst @@ -184,7 +184,7 @@ If you have a full bucket, you can delete it this way:: Listing available buckets ------------------------- - >>> for bucket in storage.get_all_buckets(connection): + >>> for bucket in storage.list_buckets(connection): ... print bucket.name Managing access control diff --git a/docs/_components/storage-quickstart.rst b/docs/_components/storage-quickstart.rst index f32bd77b6b8f..a103a8a73380 100644 --- a/docs/_components/storage-quickstart.rst +++ b/docs/_components/storage-quickstart.rst @@ -56,7 +56,7 @@ Once you have the connection, you can create buckets and blobs:: >>> from gcloud import storage - >>> storage.get_all_buckets(connection) + >>> storage.list_buckets(connection) [, ...] >>> bucket = storage.create_bucket('my-new-bucket', connection=connection) >>> print bucket diff --git a/gcloud/storage/__init__.py b/gcloud/storage/__init__.py index d53861fd7b79..5f76864320c6 100644 --- a/gcloud/storage/__init__.py +++ b/gcloud/storage/__init__.py @@ -48,8 +48,8 @@ from gcloud.storage._implicit_environ import get_default_bucket from gcloud.storage._implicit_environ import get_default_connection from gcloud.storage.api import create_bucket -from gcloud.storage.api import get_all_buckets from gcloud.storage.api import get_bucket +from gcloud.storage.api import list_buckets from gcloud.storage.api import lookup_bucket from gcloud.storage.batch import Batch from gcloud.storage.blob import Blob diff --git a/gcloud/storage/api.py b/gcloud/storage/api.py index edd31abe81d0..ae3243a6ed52 100644 --- a/gcloud/storage/api.py +++ b/gcloud/storage/api.py @@ -59,22 +59,45 @@ def lookup_bucket(bucket_name, connection=None): return None -def get_all_buckets(project=None, connection=None): +def list_buckets(project=None, max_results=None, page_token=None, prefix=None, + projection='noAcl', fields=None, connection=None): """Get all buckets in the project. This will not populate the list of blobs available in each bucket. >>> from gcloud import storage - >>> for bucket in storage.get_all_buckets(): + >>> for bucket in storage.list_buckets(): >>> print bucket This implements "storage.buckets.list". - :type project: string + :type project: string or ``NoneType`` :param project: Optional. The project to use when listing all buckets. If not provided, falls back to default. + :type max_results: integer or ``NoneType`` + :param max_results: Optional. Maximum number of buckets to return. + + :type page_token: string or ``NoneType`` + :param page_token: Optional. Opaque marker for the next "page" of buckets. + If not passed, will return the first page of buckets. 
+ + :type prefix: string or ``NoneType`` + :param prefix: Optional. Filter results to buckets whose names begin with + this prefix. + + :type projection: string or ``NoneType`` + :param projection: If used, must be 'full' or 'noAcl'. Defaults to + 'noAcl'. Specifies the set of properties to return. + + :type fields: string or ``NoneType`` + :param fields: Selector specifying which fields to include in a + partial response. Must be a list of fields. For example + to get a partial response with just the next page token + and the language of each bucket returned: + 'items/id,nextPageToken' + :type connection: :class:`gcloud.storage.connection.Connection` or ``NoneType`` :param connection: Optional. The connection to use when sending requests. @@ -87,8 +110,25 @@ def get_all_buckets(project=None, connection=None): if project is None: project = get_default_project() extra_params = {'project': project} - return iter(_BucketIterator(connection=connection, - extra_params=extra_params)) + + if max_results is not None: + extra_params['maxResults'] = max_results + + if prefix is not None: + extra_params['prefix'] = prefix + + extra_params['projection'] = projection + + if fields is not None: + extra_params['fields'] = fields + + result = _BucketIterator(connection=connection, + extra_params=extra_params) + # Page token must be handled specially since the base `Iterator` + # class has it as a reserved property. + if page_token is not None: + result.next_page_token = page_token + return iter(result) def get_bucket(bucket_name, connection=None): diff --git a/gcloud/storage/demo/__init__.py b/gcloud/storage/demo/__init__.py index 26e949ba8c45..0558e7f8bf6c 100644 --- a/gcloud/storage/demo/__init__.py +++ b/gcloud/storage/demo/__init__.py @@ -15,13 +15,14 @@ import os from gcloud import storage -__all__ = ['create_bucket', 'get_all_buckets', 'PROJECT_ID'] +__all__ = ['create_bucket', 'list_buckets', 'PROJECT_ID'] PROJECT_ID = os.getenv('GCLOUD_TESTS_PROJECT_ID') -def get_all_buckets(connection): - return list(storage.get_all_buckets(PROJECT_ID, connection)) +def list_buckets(connection): + return list(storage.list_buckets(project=PROJECT_ID, + connection=connection)) def create_bucket(bucket_name, connection): diff --git a/gcloud/storage/demo/demo.py b/gcloud/storage/demo/demo.py index 6526c99aa4a9..93d8766f4049 100644 --- a/gcloud/storage/demo/demo.py +++ b/gcloud/storage/demo/demo.py @@ -25,7 +25,7 @@ connection = storage.get_connection() # OK, now let's look at all of the buckets... -print(demo.get_all_buckets(connection)) # This might take a second... +print(list(demo.list_buckets(connection))) # This might take a second... # Now let's create a new bucket... bucket_name = ("bucket-%s" % time.time()).replace(".", "") # Get rid of dots. @@ -34,7 +34,7 @@ print(bucket) # Let's look at all of the buckets again... -print(demo.get_all_buckets(connection)) +print(list(demo.list_buckets(connection))) # How about we create a new blob inside this bucket. 
blob = storage.Blob("my-new-file.txt", bucket=bucket) diff --git a/gcloud/storage/test_api.py b/gcloud/storage/test_api.py index b06e7eb7d853..2259a704d7f5 100644 --- a/gcloud/storage/test_api.py +++ b/gcloud/storage/test_api.py @@ -78,42 +78,55 @@ def test_use_default(self): self._lookup_bucket_hit_helper(use_default=True) -class Test_get_all_buckets(unittest2.TestCase): +class Test_list_buckets(unittest2.TestCase): - def _callFUT(self, project=None, connection=None): - from gcloud.storage.api import get_all_buckets - return get_all_buckets(project=project, connection=connection) + def _callFUT(self, *args, **kwargs): + from gcloud.storage.api import list_buckets + return list_buckets(*args, **kwargs) def test_empty(self): + from six.moves.urllib.parse import parse_qs + from six.moves.urllib.parse import urlparse from gcloud.storage.connection import Connection PROJECT = 'project' conn = Connection() - URI = '/'.join([ - conn.API_BASE_URL, - 'storage', - conn.API_VERSION, - 'b?project=%s' % PROJECT, - ]) + EXPECTED_QUERY = { + 'project': [PROJECT], + 'projection': ['noAcl'], + } http = conn._http = Http( {'status': '200', 'content-type': 'application/json'}, b'{}', ) - buckets = list(self._callFUT(PROJECT, conn)) + buckets = list(self._callFUT(project=PROJECT, connection=conn)) self.assertEqual(len(buckets), 0) self.assertEqual(http._called_with['method'], 'GET') - self.assertEqual(http._called_with['uri'], URI) + self.assertEqual(http._called_with['body'], None) + + BASE_URI = '/'.join([ + conn.API_BASE_URL, + 'storage', + conn.API_VERSION, + 'b', + ]) + URI = http._called_with['uri'] + self.assertTrue(URI.startswith(BASE_URI)) + uri_parts = urlparse(URI) + self.assertEqual(parse_qs(uri_parts.query), EXPECTED_QUERY) - def _get_all_buckets_non_empty_helper(self, project, use_default=False): + def _list_buckets_non_empty_helper(self, project, use_default=False): + from six.moves.urllib.parse import urlencode from gcloud._testing import _monkey_defaults as _base_monkey_defaults from gcloud.storage._testing import _monkey_defaults from gcloud.storage.connection import Connection BUCKET_NAME = 'bucket-name' conn = Connection() + query_params = urlencode({'project': project, 'projection': 'noAcl'}) URI = '/'.join([ conn.API_BASE_URL, 'storage', conn.API_VERSION, - 'b?project=%s' % project, + 'b?%s' % (query_params,), ]) http = conn._http = Http( {'status': '200', 'content-type': 'application/json'}, @@ -126,7 +139,7 @@ def _get_all_buckets_non_empty_helper(self, project, use_default=False): with _monkey_defaults(connection=conn): buckets = list(self._callFUT()) else: - buckets = list(self._callFUT(project, conn)) + buckets = list(self._callFUT(project=project, connection=conn)) self.assertEqual(len(buckets), 1) self.assertEqual(buckets[0].name, BUCKET_NAME) @@ -134,10 +147,58 @@ def _get_all_buckets_non_empty_helper(self, project, use_default=False): self.assertEqual(http._called_with['uri'], URI) def test_non_empty(self): - self._get_all_buckets_non_empty_helper('PROJECT', use_default=False) + self._list_buckets_non_empty_helper('PROJECT', use_default=False) def test_non_use_default(self): - self._get_all_buckets_non_empty_helper('PROJECT', use_default=True) + self._list_buckets_non_empty_helper('PROJECT', use_default=True) + + def test_all_arguments(self): + from six.moves.urllib.parse import parse_qs + from six.moves.urllib.parse import urlparse + from gcloud.storage.connection import Connection + PROJECT = 'foo-bar' + MAX_RESULTS = 10 + PAGE_TOKEN = 'ABCD' + PREFIX = 'subfolder' + 
PROJECTION = 'full' + FIELDS = 'items/id,nextPageToken' + EXPECTED_QUERY = { + 'project': [PROJECT], + 'maxResults': [str(MAX_RESULTS)], + 'pageToken': [PAGE_TOKEN], + 'prefix': [PREFIX], + 'projection': [PROJECTION], + 'fields': [FIELDS], + } + CONNECTION = Connection() + http = CONNECTION._http = Http( + {'status': '200', 'content-type': 'application/json'}, + '{"items": []}', + ) + iterator = self._callFUT( + project=PROJECT, + max_results=MAX_RESULTS, + page_token=PAGE_TOKEN, + prefix=PREFIX, + projection=PROJECTION, + fields=FIELDS, + connection=CONNECTION, + ) + buckets = list(iterator) + self.assertEqual(buckets, []) + self.assertEqual(http._called_with['method'], 'GET') + self.assertEqual(http._called_with['body'], None) + + BASE_URI = '/'.join([ + CONNECTION.API_BASE_URL, + 'storage', + CONNECTION.API_VERSION, + 'b' + ]) + URI = http._called_with['uri'] + self.assertTrue(URI.startswith(BASE_URI)) + uri_parts = urlparse(URI) + self.assertEqual(parse_qs(uri_parts.query), EXPECTED_QUERY) class Test_get_bucket(unittest2.TestCase): diff --git a/regression/storage.py b/regression/storage.py index ba17927837fe..027d5b57cdc7 100644 --- a/regression/storage.py +++ b/regression/storage.py @@ -75,7 +75,7 @@ def test_get_buckets(self): self.case_buckets_to_delete.append(bucket_name) # Retrieve the buckets. - all_buckets = storage.get_all_buckets() + all_buckets = storage.list_buckets() created_buckets = [bucket for bucket in all_buckets if bucket.name in buckets_to_create] self.assertEqual(len(created_buckets), len(buckets_to_create))
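
Usage sketch (illustrative, not part of the patch above): a minimal example of
calling the renamed storage.list_buckets with its new optional query
parameters, based only on the signature added in this change. The project id,
prefix, and saved page token below are placeholders, not values taken from
the patch.

    from gcloud import storage

    connection = storage.get_connection()

    # List up to ten buckets whose names start with 'demo-', keeping the
    # default 'noAcl' projection so bucket ACLs are not returned.
    for bucket in storage.list_buckets(project='my-project',  # placeholder
                                       max_results=10,
                                       prefix='demo-',
                                       projection='noAcl',
                                       connection=connection):
        print(bucket.name)

    # A page_token saved from an earlier listing resumes from that page; per
    # the patch, it is assigned to the iterator's next_page_token before
    # iteration begins.
    # buckets = list(storage.list_buckets(page_token=saved_token,
    #                                     connection=connection))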