Merge pull request #806 from dhermes/rename-get-all-buckets
Renaming storage.get_all_buckets to storage.list_buckets.
dhermes committed Apr 9, 2015
2 parents 41b5b72 + 9be17de commit 1cdcc6d
Showing 8 changed files with 133 additions and 31 deletions.
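
For orientation, a minimal caller-side sketch of what this rename means in practice; the connection setup mirrors the demo script below, and the listed buckets are simply whatever exists in the project:

    from gcloud import storage

    connection = storage.get_connection()

    # Before this commit:
    #     for bucket in storage.get_all_buckets(connection=connection):
    #         print(bucket.name)
    # After this commit, the same listing is spelled:
    for bucket in storage.list_buckets(connection=connection):
        print(bucket.name)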
2 changes: 1 addition & 1 deletion docs/_components/storage-getting-started.rst
@@ -184,7 +184,7 @@ If you have a full bucket, you can delete it this way::
Listing available buckets
-------------------------

>>> for bucket in storage.get_all_buckets(connection):
>>> for bucket in storage.list_buckets(connection):
... print bucket.name

Managing access control
2 changes: 1 addition & 1 deletion docs/_components/storage-quickstart.rst
@@ -56,7 +56,7 @@ Once you have the connection,
you can create buckets and blobs::

>>> from gcloud import storage
>>> storage.get_all_buckets(connection)
>>> storage.list_buckets(connection)
[<Bucket: ...>, ...]
>>> bucket = storage.create_bucket('my-new-bucket', connection=connection)
>>> print bucket
2 changes: 1 addition & 1 deletion gcloud/storage/__init__.py
@@ -48,8 +48,8 @@
from gcloud.storage._implicit_environ import get_default_bucket
from gcloud.storage._implicit_environ import get_default_connection
from gcloud.storage.api import create_bucket
from gcloud.storage.api import get_all_buckets
from gcloud.storage.api import get_bucket
from gcloud.storage.api import list_buckets
from gcloud.storage.api import lookup_bucket
from gcloud.storage.batch import Batch
from gcloud.storage.blob import Blob
50 changes: 45 additions & 5 deletions gcloud/storage/api.py
@@ -59,22 +59,45 @@ def lookup_bucket(bucket_name, connection=None):
return None


def get_all_buckets(project=None, connection=None):
def list_buckets(project=None, max_results=None, page_token=None, prefix=None,
projection='noAcl', fields=None, connection=None):
"""Get all buckets in the project.
This will not populate the list of blobs available in each
bucket.
>>> from gcloud import storage
>>> for bucket in storage.get_all_buckets():
>>> for bucket in storage.list_buckets():
>>> print bucket
This implements "storage.buckets.list".
:type project: string
:type project: string or ``NoneType``
:param project: Optional. The project to use when listing all buckets.
If not provided, falls back to default.
:type max_results: integer or ``NoneType``
:param max_results: Optional. Maximum number of buckets to return.
:type page_token: string or ``NoneType``
:param page_token: Optional. Opaque marker for the next "page" of buckets.
If not passed, will return the first page of buckets.
:type prefix: string or ``NoneType``
:param prefix: Optional. Filter results to buckets whose names begin with
this prefix.
:type projection: string or ``NoneType``
:param projection: If used, must be 'full' or 'noAcl'. Defaults to
'noAcl'. Specifies the set of properties to return.
:type fields: string or ``NoneType``
:param fields: Selector specifying which fields to include in a
partial response. Must be a comma-separated list of fields. For
example, to get a partial response with just the next page token
and the ID of each bucket returned:
'items/id,nextPageToken'
:type connection: :class:`gcloud.storage.connection.Connection` or
``NoneType``
:param connection: Optional. The connection to use when sending requests.
@@ -87,8 +110,25 @@ def get_all_buckets(project=None, connection=None):
if project is None:
project = get_default_project()
extra_params = {'project': project}
return iter(_BucketIterator(connection=connection,
extra_params=extra_params))

if max_results is not None:
extra_params['maxResults'] = max_results

if prefix is not None:
extra_params['prefix'] = prefix

extra_params['projection'] = projection

if fields is not None:
extra_params['fields'] = fields

result = _BucketIterator(connection=connection,
extra_params=extra_params)
# Page token must be handled specially since the base `Iterator`
# class has it as a reserved property.
if page_token is not None:
result.next_page_token = page_token
return iter(result)


def get_bucket(bucket_name, connection=None):
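
To make the expanded signature concrete, a hedged usage sketch of the new keyword arguments; the project ID and prefix below are placeholders rather than values from this commit:

    from gcloud import storage

    connection = storage.get_connection()

    # First page of at most ten buckets whose names start with 'staging-',
    # with ACL data omitted from the response.
    buckets = storage.list_buckets(
        project='my-project-id',   # placeholder project ID
        max_results=10,
        prefix='staging-',
        projection='noAcl',
        connection=connection,
    )
    for bucket in buckets:
        print(bucket.name)

    # `fields` and `page_token` plug in the same way, per the docstring above:
    #     storage.list_buckets(project='my-project-id',
    #                          fields='items/id,nextPageToken',
    #                          page_token=token_from_a_previous_page,
    #                          connection=connection)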
7 changes: 4 additions & 3 deletions gcloud/storage/demo/__init__.py
@@ -15,13 +15,14 @@
import os
from gcloud import storage

__all__ = ['create_bucket', 'get_all_buckets', 'PROJECT_ID']
__all__ = ['create_bucket', 'list_buckets', 'PROJECT_ID']

PROJECT_ID = os.getenv('GCLOUD_TESTS_PROJECT_ID')


def get_all_buckets(connection):
return list(storage.get_all_buckets(PROJECT_ID, connection))
def list_buckets(connection):
return list(storage.list_buckets(project=PROJECT_ID,
connection=connection))


def create_bucket(bucket_name, connection):
4 changes: 2 additions & 2 deletions gcloud/storage/demo/demo.py
@@ -25,7 +25,7 @@
connection = storage.get_connection()

# OK, now let's look at all of the buckets...
print(demo.get_all_buckets(connection)) # This might take a second...
print(list(demo.list_buckets(connection))) # This might take a second...

# Now let's create a new bucket...
bucket_name = ("bucket-%s" % time.time()).replace(".", "") # Get rid of dots.
@@ -34,7 +34,7 @@
print(bucket)

# Let's look at all of the buckets again...
print(demo.get_all_buckets(connection))
print(list(demo.list_buckets(connection)))

# How about we create a new blob inside this bucket.
blob = storage.Blob("my-new-file.txt", bucket=bucket)
95 changes: 78 additions & 17 deletions gcloud/storage/test_api.py
@@ -78,42 +78,55 @@ def test_use_default(self):
self._lookup_bucket_hit_helper(use_default=True)


class Test_get_all_buckets(unittest2.TestCase):
class Test_list_buckets(unittest2.TestCase):

def _callFUT(self, project=None, connection=None):
from gcloud.storage.api import get_all_buckets
return get_all_buckets(project=project, connection=connection)
def _callFUT(self, *args, **kwargs):
from gcloud.storage.api import list_buckets
return list_buckets(*args, **kwargs)

def test_empty(self):
from six.moves.urllib.parse import parse_qs
from six.moves.urllib.parse import urlparse
from gcloud.storage.connection import Connection
PROJECT = 'project'
conn = Connection()
URI = '/'.join([
conn.API_BASE_URL,
'storage',
conn.API_VERSION,
'b?project=%s' % PROJECT,
])
EXPECTED_QUERY = {
'project': [PROJECT],
'projection': ['noAcl'],
}
http = conn._http = Http(
{'status': '200', 'content-type': 'application/json'},
b'{}',
)
buckets = list(self._callFUT(PROJECT, conn))
buckets = list(self._callFUT(project=PROJECT, connection=conn))
self.assertEqual(len(buckets), 0)
self.assertEqual(http._called_with['method'], 'GET')
self.assertEqual(http._called_with['uri'], URI)
self.assertEqual(http._called_with['body'], None)

BASE_URI = '/'.join([
conn.API_BASE_URL,
'storage',
conn.API_VERSION,
'b',
])
URI = http._called_with['uri']
self.assertTrue(URI.startswith(BASE_URI))
uri_parts = urlparse(URI)
self.assertEqual(parse_qs(uri_parts.query), EXPECTED_QUERY)

def _get_all_buckets_non_empty_helper(self, project, use_default=False):
def _list_buckets_non_empty_helper(self, project, use_default=False):
from six.moves.urllib.parse import urlencode
from gcloud._testing import _monkey_defaults as _base_monkey_defaults
from gcloud.storage._testing import _monkey_defaults
from gcloud.storage.connection import Connection
BUCKET_NAME = 'bucket-name'
conn = Connection()
query_params = urlencode({'project': project, 'projection': 'noAcl'})
URI = '/'.join([
conn.API_BASE_URL,
'storage',
conn.API_VERSION,
'b?project=%s' % project,
'b?%s' % (query_params,),
])
http = conn._http = Http(
{'status': '200', 'content-type': 'application/json'},
@@ -126,18 +139,66 @@ def _get_all_buckets_non_empty_helper(self, project, use_default=False):
with _monkey_defaults(connection=conn):
buckets = list(self._callFUT())
else:
buckets = list(self._callFUT(project, conn))
buckets = list(self._callFUT(project=project, connection=conn))

self.assertEqual(len(buckets), 1)
self.assertEqual(buckets[0].name, BUCKET_NAME)
self.assertEqual(http._called_with['method'], 'GET')
self.assertEqual(http._called_with['uri'], URI)

def test_non_empty(self):
self._get_all_buckets_non_empty_helper('PROJECT', use_default=False)
self._list_buckets_non_empty_helper('PROJECT', use_default=False)

def test_non_use_default(self):
self._get_all_buckets_non_empty_helper('PROJECT', use_default=True)
self._list_buckets_non_empty_helper('PROJECT', use_default=True)

def test_all_arguments(self):
from six.moves.urllib.parse import parse_qs
from six.moves.urllib.parse import urlparse
from gcloud.storage.connection import Connection
PROJECT = 'foo-bar'
MAX_RESULTS = 10
PAGE_TOKEN = 'ABCD'
PREFIX = 'subfolder'
PROJECTION = 'full'
FIELDS = 'items/id,nextPageToken'
EXPECTED_QUERY = {
'project': [PROJECT],
'maxResults': [str(MAX_RESULTS)],
'pageToken': [PAGE_TOKEN],
'prefix': [PREFIX],
'projection': [PROJECTION],
'fields': [FIELDS],
}
CONNECTION = Connection()
http = CONNECTION._http = Http(
{'status': '200', 'content-type': 'application/json'},
'{"items": []}',
)
iterator = self._callFUT(
project=PROJECT,
max_results=MAX_RESULTS,
page_token=PAGE_TOKEN,
prefix=PREFIX,
projection=PROJECTION,
fields=FIELDS,
connection=CONNECTION,
)
buckets = list(iterator)
self.assertEqual(buckets, [])
self.assertEqual(http._called_with['method'], 'GET')
self.assertEqual(http._called_with['body'], None)

BASE_URI = '/'.join([
CONNECTION.API_BASE_URL,
'storage',
CONNECTION.API_VERSION,
'b'
])
URI = http._called_with['uri']
self.assertTrue(URI.startswith(BASE_URI))
uri_parts = urlparse(URI)
self.assertEqual(parse_qs(uri_parts.query), EXPECTED_QUERY)


class Test_get_bucket(unittest2.TestCase):
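
The updated tests above compare parsed query dictionaries rather than literal URIs, since a query string built from a dict of parameters carries no ordering guarantee; a small standalone illustration of that comparison (the URI is made up):

    from six.moves.urllib.parse import parse_qs, urlparse

    uri = 'https://example.invalid/storage/v1/b?project=foo-bar&prefix=subfolder&maxResults=10'
    query = parse_qs(urlparse(uri).query)
    # parse_qs maps each parameter to a list of values, so the ordering of the
    # raw query string no longer matters in the equality check.
    assert query == {
        'project': ['foo-bar'],
        'prefix': ['subfolder'],
        'maxResults': ['10'],
    }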
2 changes: 1 addition & 1 deletion regression/storage.py
@@ -75,7 +75,7 @@ def test_get_buckets(self):
self.case_buckets_to_delete.append(bucket_name)

# Retrieve the buckets.
all_buckets = storage.get_all_buckets()
all_buckets = storage.list_buckets()
created_buckets = [bucket for bucket in all_buckets
if bucket.name in buckets_to_create]
self.assertEqual(len(created_buckets), len(buckets_to_create))
