From a1a43c1743ce7e5b0d1bb5c4a65974a5c06f8e3e Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 28 Oct 2015 15:22:23 -0400 Subject: [PATCH 01/17] Unused imports. --- gcloud/streaming/transfer.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/gcloud/streaming/transfer.py b/gcloud/streaming/transfer.py index 6630525707ba..0f8fc48c9a21 100644 --- a/gcloud/streaming/transfer.py +++ b/gcloud/streaming/transfer.py @@ -1,15 +1,11 @@ # pylint: skip-file """Upload and download support for apitools.""" -from __future__ import print_function import email.generator as email_generator import email.mime.multipart as mime_multipart import email.mime.nonmultipart as mime_nonmultipart -import io -import json import mimetypes import os -import threading import six from six.moves import http_client From e74b5aec22850ed7dfd7f41cd9c2203fd0e7b935 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 28 Oct 2015 15:45:06 -0400 Subject: [PATCH 02/17] Make imports explicit, and document them. --- gcloud/storage/blob.py | 9 +- gcloud/streaming/buffered_stream.py | 4 + gcloud/streaming/exceptions.py | 27 +++++- gcloud/streaming/http_wrapper.py | 38 +++++--- gcloud/streaming/stream_slice.py | 7 +- gcloud/streaming/test_transfer.py | 143 ++++++++++------------------ gcloud/streaming/transfer.py | 119 ++++++++++++----------- gcloud/streaming/util.py | 10 +- 8 files changed, 187 insertions(+), 170 deletions(-) diff --git a/gcloud/storage/blob.py b/gcloud/storage/blob.py index 409cd426ede6..0671285ccf87 100644 --- a/gcloud/storage/blob.py +++ b/gcloud/storage/blob.py @@ -25,7 +25,6 @@ import six from six.moves.urllib.parse import quote # pylint: disable=F0401 -from gcloud.streaming import http_wrapper from gcloud.streaming import transfer from gcloud._helpers import _RFC3339_MICROS @@ -35,6 +34,8 @@ from gcloud.storage._helpers import _PropertyMixin from gcloud.storage._helpers import _scalar_property from gcloud.storage.acl import ObjectACL +from gcloud.streaming.http_wrapper import Request +from gcloud.streaming.http_wrapper import MakeRequest _API_ACCESS_ENDPOINT = 'https://storage.googleapis.com' @@ -263,7 +264,7 @@ def download_to_file(self, file_obj, client=None): if self.chunk_size is not None: download.chunksize = self.chunk_size headers['Range'] = 'bytes=0-%d' % (self.chunk_size - 1,) - request = http_wrapper.Request(download_url, 'GET', headers) + request = Request(download_url, 'GET', headers) # Use the private ``_connection`` rather than the public # ``.connection``, since the public connection may be a batch. A @@ -397,7 +398,7 @@ def upload_from_file(self, file_obj, rewind=False, size=None, path=self.bucket.path + '/o') # Use apitools 'Upload' facility. 
- request = http_wrapper.Request(upload_url, 'POST', headers) + request = Request(upload_url, 'POST', headers) upload.ConfigureRequest(upload_config, request, url_builder) query_params = url_builder.query_params @@ -410,7 +411,7 @@ def upload_from_file(self, file_obj, rewind=False, size=None, if upload.strategy == transfer.RESUMABLE_UPLOAD: http_response = upload.StreamInChunks() else: - http_response = http_wrapper.MakeRequest(connection.http, request, + http_response = MakeRequest(connection.http, request, retries=num_retries) response_content = http_response.content if not isinstance(response_content, diff --git a/gcloud/streaming/buffered_stream.py b/gcloud/streaming/buffered_stream.py index 0920ea0499e2..6e89b6c0df22 100644 --- a/gcloud/streaming/buffered_stream.py +++ b/gcloud/streaming/buffered_stream.py @@ -2,6 +2,10 @@ """Small helper class to provide a small slice of a stream. This class reads ahead to detect if we are at the end of the stream. + +:mod:`gcloud._apidools.transfer` uses: + +- :class:`BufferedStream` """ from gcloud.streaming import exceptions diff --git a/gcloud/streaming/exceptions.py b/gcloud/streaming/exceptions.py index 14f6e5c2a8b6..a9055dab82a1 100644 --- a/gcloud/streaming/exceptions.py +++ b/gcloud/streaming/exceptions.py @@ -1,5 +1,30 @@ # pylint: skip-file -"""Exceptions for generated client libraries.""" +"""Exceptions for generated client libraries. + +:mod:`gcloud.streaming.http_wrapper` uses: + +- :exc:`BadStatusCodeError` +- :exc:`RequestError` +- :exc:`RetryAfterError` + +:mod:`gcloud._apidools.transfer` uses: + +- :exc:`CommunicationError` +- :exc:`ConfigurationValueError` +- :exc:`HttpError` +- :exc:`InvalidDataError` +- :exc:`InvalidUserInputError` +- :exc:`NotFoundError` +- :exc:`TransferInvalidError` +- :exc:`TransferRetryError` +- :exc:`UserError` + +:mod:`gcloud._apidools.util` uses: + +- :exc:`GeneratedClientError` +- :exc:`InvalidUserInputError` +- :exc:`TypecheckError` +""" class Error(Exception): diff --git a/gcloud/streaming/http_wrapper.py b/gcloud/streaming/http_wrapper.py index f9e7673b9933..9134355db588 100644 --- a/gcloud/streaming/http_wrapper.py +++ b/gcloud/streaming/http_wrapper.py @@ -3,6 +3,20 @@ This library wraps the underlying http library we use, which is currently httplib2. + + +:mod:`gcloud.storage.blob` uses: + +- :class:`Request` +- :function:`MakeRequest` + +:mod:`gcloud._apidools.transfer` uses: + +- :function:`GetHttp` +- :function:`HandleExceptionsAndRebuildHttpConnections` +- :function:`MakeRequest` +- :class:`Request` +- :const:`RESUME_INCOMPLETE` """ import collections @@ -16,8 +30,10 @@ from six.moves import http_client from six.moves.urllib import parse -from gcloud.streaming import exceptions -from gcloud.streaming import util +from gcloud.streaming.exceptions import BadStatusCodeError +from gcloud.streaming.exceptions import RequestError +from gcloud.streaming.exceptions import RetryAfterError +from gcloud.streaming.util import CalculateWaitForRetry __all__ = [ 'CheckResponse', @@ -112,7 +128,7 @@ def loggable_body(self): @loggable_body.setter def loggable_body(self, value): if self.body is None: - raise exceptions.RequestError( + raise RequestError( 'Cannot set loggable body on request with no body') self.__loggable_body = value @@ -192,13 +208,13 @@ def is_redirect(self): def CheckResponse(response): if response is None: # Caller shouldn't call us if the response is None, but handle anyway. 
- raise exceptions.RequestError( + raise RequestError( 'Request did not return a response.') elif (response.status_code >= 500 or response.status_code == TOO_MANY_REQUESTS): - raise exceptions.BadStatusCodeError.FromResponse(response) + raise BadStatusCodeError.FromResponse(response) elif response.retry_after: - raise exceptions.RetryAfterError.FromResponse(response) + raise RetryAfterError.FromResponse(response) def RebuildHttpConnections(http): @@ -255,13 +271,13 @@ def HandleExceptionsAndRebuildHttpConnections(retry_args): # oauth2client, need to handle it here. logging.debug('Response content was invalid (%s), retrying', retry_args.exc) - elif isinstance(retry_args.exc, exceptions.RequestError): + elif isinstance(retry_args.exc, RequestError): logging.debug('Request returned no response, retrying') # API-level failures - elif isinstance(retry_args.exc, exceptions.BadStatusCodeError): + elif isinstance(retry_args.exc, BadStatusCodeError): logging.debug('Response returned status %s, retrying', retry_args.exc.status_code) - elif isinstance(retry_args.exc, exceptions.RetryAfterError): + elif isinstance(retry_args.exc, RetryAfterError): logging.debug('Response returned a retry-after header, retrying') retry_after = retry_args.exc.retry_after else: @@ -270,7 +286,7 @@ def HandleExceptionsAndRebuildHttpConnections(retry_args): logging.debug('Retrying request to url %s after exception %s', retry_args.http_request.url, retry_args.exc) time.sleep( - retry_after or util.CalculateWaitForRetry( + retry_after or CalculateWaitForRetry( retry_args.num_retries, max_wait=retry_args.max_retry_wait)) @@ -314,7 +330,7 @@ def _MakeRequestNoRetry(http, http_request, redirections=5, redirections=redirections, connection_type=connection_type) if info is None: - raise exceptions.RequestError() + raise RequestError() response = Response(info, content, http_request.url) check_response_func(response) diff --git a/gcloud/streaming/stream_slice.py b/gcloud/streaming/stream_slice.py index df3023d2b4ce..eedfcc5a5203 100644 --- a/gcloud/streaming/stream_slice.py +++ b/gcloud/streaming/stream_slice.py @@ -1,5 +1,10 @@ # pylint: skip-file -"""Small helper class to provide a small slice of a stream.""" +"""Small helper class to provide a small slice of a stream. 
+ +:mod:`gcloud._apidools.transfer` uses: + +- :class:`StreamSlice` +""" from gcloud.streaming import exceptions diff --git a/gcloud/streaming/test_transfer.py b/gcloud/streaming/test_transfer.py index cabf65396ff9..39924dd780da 100644 --- a/gcloud/streaming/test_transfer.py +++ b/gcloud/streaming/test_transfer.py @@ -307,7 +307,7 @@ def test_InitializeDownload_w_autotransfer_failing(self): requester = _MakeRequest(response) with _Monkey(MUT, - http_wrapper=_Dummy(MakeRequest=requester)): + MakeRequest=requester): with self.assertRaises(HttpError): download.InitializeDownload(request, http) @@ -327,7 +327,7 @@ def test_InitializeDownload_w_autotransfer_w_content_location(self): response = _makeResponse(http_client.NO_CONTENT, info) requester = _MakeRequest(response) - with _Monkey(MUT, http_wrapper=_Dummy(MakeRequest=requester)): + with _Monkey(MUT, MakeRequest=requester): download.InitializeDownload(request, http) self.assertTrue(download._initial_response is None) @@ -450,9 +450,8 @@ def test__GetChunk_wo_additional_headers(self): requester = _MakeRequest(response) with _Monkey(MUT, - http_wrapper=_Dummy( - Request=lambda url: request, - MakeRequest=requester)): + Request=lambda url: request, + MakeRequest=requester): found = download._GetChunk(0, 10) self.assertTrue(found is response) @@ -473,9 +472,8 @@ def test__GetChunk_w_additional_headers(self): requester = _MakeRequest(response) with _Monkey(MUT, - http_wrapper=_Dummy( - Request=lambda url: request, - MakeRequest=requester)): + Request=lambda url: request, + MakeRequest=requester): found = download._GetChunk(0, 10, additional_headers=headers) self.assertTrue(found is response) @@ -580,9 +578,8 @@ def test_GetRange_wo_total_size_complete(self): requester = _MakeRequest(response) with _Monkey(MUT, - http_wrapper=_Dummy( - Request=lambda url: request, - MakeRequest=requester)): + Request=lambda url: request, + MakeRequest=requester): download.GetRange(0, LEN) self.assertTrue(len(requester._requested), 1) @@ -611,9 +608,8 @@ def test_GetRange_wo_total_size_wo_end(self): requester = _MakeRequest(response) with _Monkey(MUT, - http_wrapper=_Dummy( - Request=lambda url: request, - MakeRequest=requester)): + Request=lambda url: request, + MakeRequest=requester): download.GetRange(START) self.assertTrue(len(requester._requested), 1) @@ -644,9 +640,8 @@ def test_GetRange_w_total_size_partial_w_additional_headers(self): requester = _MakeRequest(response) with _Monkey(MUT, - http_wrapper=_Dummy( - Request=lambda url: request, - MakeRequest=requester)): + Request=lambda url: request, + MakeRequest=requester): download.GetRange(0, PARTIAL_LEN, additional_headers=headers) self.assertTrue(len(requester._requested), 1) @@ -676,9 +671,8 @@ def test_GetRange_w_empty_chunk(self): requester = _MakeRequest(response) with _Monkey(MUT, - http_wrapper=_Dummy( - Request=lambda url: request, - MakeRequest=requester)): + Request=lambda url: request, + MakeRequest=requester): with self.assertRaises(TransferRetryError): download.GetRange(START) @@ -707,9 +701,8 @@ def test_GetRange_w_total_size_wo_use_chunks(self): requester = _MakeRequest(response) with _Monkey(MUT, - http_wrapper=_Dummy( - Request=lambda url: request, - MakeRequest=requester)): + Request=lambda url: request, + MakeRequest=requester): download.GetRange(0, use_chunks=False) self.assertTrue(len(requester._requested), 1) @@ -744,9 +737,8 @@ def test_GetRange_w_multiple_chunks(self): requester = _MakeRequest(response_1, response_2) with _Monkey(MUT, - http_wrapper=_Dummy( - Request=lambda url: 
_requests.pop(0), - MakeRequest=requester)): + Request=lambda url: _requests.pop(0), + MakeRequest=requester): download.GetRange(0) self.assertTrue(len(requester._requested), 2) @@ -785,19 +777,14 @@ def _stream_media(additional_headers=None, use_chunks=False): self.assertEqual(_called_with, [(headers, True)]) def test_StreamMedia_not_initialized(self): - from gcloud._testing import _Monkey - from gcloud.streaming import transfer as MUT from gcloud.streaming.exceptions import TransferInvalidError download = self._makeOne(_Stream()) - with _Monkey(MUT, http_wrapper=_Dummy()): - with self.assertRaises(TransferInvalidError): - found = download.StreamMedia() + with self.assertRaises(TransferInvalidError): + found = download.StreamMedia() def test_StreamMedia_w_initial_response_complete(self): from six.moves import http_client - from gcloud._testing import _Monkey - from gcloud.streaming import transfer as MUT CONTENT = b'ABCDEFGHIJ' LEN = len(CONTENT) RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,) @@ -809,9 +796,7 @@ def test_StreamMedia_w_initial_response_complete(self): http = object() download._Initialize(http, _Request.URL) - with _Monkey(MUT, - http_wrapper=_Dummy()): - download.StreamMedia() + download.StreamMedia() self.assertEqual(stream._written, [CONTENT]) self.assertEqual(download.total_size, LEN) @@ -842,9 +827,8 @@ def test_StreamMedia_w_initial_response_incomplete(self): request = _Request() with _Monkey(MUT, - http_wrapper=_Dummy( - Request=lambda url: request, - MakeRequest=requester)): + Request=lambda url: request, + MakeRequest=requester): download.StreamMedia() self.assertTrue(len(requester._requested), 1) @@ -874,9 +858,8 @@ def test_StreamMedia_wo_initial_response_wo_total_size(self): request = _Request() with _Monkey(MUT, - http_wrapper=_Dummy( - Request=lambda url: request, - MakeRequest=requester)): + Request=lambda url: request, + MakeRequest=requester): download.StreamMedia() self.assertTrue(len(requester._requested), 1) @@ -906,9 +889,8 @@ def test_StreamMedia_wo_initial_response_w_addl_headers_wo_chunks(self): request = _Request() with _Monkey(MUT, - http_wrapper=_Dummy( - Request=lambda url: request, - MakeRequest=requester)): + Request=lambda url: request, + MakeRequest=requester): download.StreamMedia(additional_headers=headers, use_chunks=False) self.assertTrue(len(requester._requested), 1) @@ -1283,9 +1265,8 @@ def test_RefreshResumableUploadState_w_OK(self): requester = _MakeRequest(response) with _Monkey(MUT, - http_wrapper=_Dummy( - Request=_Request, - MakeRequest=requester)): + Request=_Request, + MakeRequest=requester): upload.RefreshResumableUploadState() self.assertTrue(upload.complete) @@ -1311,9 +1292,8 @@ def test_RefreshResumableUploadState_w_CREATED(self): requester = _MakeRequest(response) with _Monkey(MUT, - http_wrapper=_Dummy( - Request=_Request, - MakeRequest=requester)): + Request=_Request, + MakeRequest=requester): upload.RefreshResumableUploadState() self.assertTrue(upload.complete) @@ -1339,10 +1319,8 @@ def test_RefreshResumableUploadState_w_RESUME_INCOMPLETE_w_range(self): requester = _MakeRequest(response) with _Monkey(MUT, - http_wrapper=_Dummy( - RESUME_INCOMPLETE=RESUME_INCOMPLETE, - Request=_Request, - MakeRequest=requester)): + Request=_Request, + MakeRequest=requester): upload.RefreshResumableUploadState() self.assertFalse(upload.complete) @@ -1367,10 +1345,8 @@ def test_RefreshResumableUploadState_w_RESUME_INCOMPLETE_wo_range(self): requester = _MakeRequest(response) with _Monkey(MUT, - http_wrapper=_Dummy( - 
RESUME_INCOMPLETE=RESUME_INCOMPLETE, - Request=_Request, - MakeRequest=requester)): + Request=_Request, + MakeRequest=requester): upload.RefreshResumableUploadState() self.assertFalse(upload.complete) @@ -1383,7 +1359,6 @@ def test_RefreshResumableUploadState_w_error(self): from gcloud._testing import _Monkey from gcloud.streaming import transfer as MUT from gcloud.streaming.exceptions import HttpError - from gcloud.streaming.http_wrapper import RESUME_INCOMPLETE from gcloud.streaming.transfer import RESUMABLE_UPLOAD CONTENT = b'ABCDEFGHIJ' LEN = len(CONTENT) @@ -1396,10 +1371,8 @@ def test_RefreshResumableUploadState_w_error(self): requester = _MakeRequest(response) with _Monkey(MUT, - http_wrapper=_Dummy( - RESUME_INCOMPLETE=RESUME_INCOMPLETE, - Request=_Request, - MakeRequest=requester)): + Request=_Request, + MakeRequest=requester): with self.assertRaises(HttpError): upload.RefreshResumableUploadState() @@ -1456,7 +1429,6 @@ def test_InitializeUpload_w_http_resumable_not_initialized_w_error(self): from gcloud._testing import _Monkey from gcloud.streaming import transfer as MUT from gcloud.streaming.exceptions import HttpError - from gcloud.streaming.http_wrapper import RESUME_INCOMPLETE from gcloud.streaming.transfer import RESUMABLE_UPLOAD CONTENT = b'ABCDEFGHIJ' LEN = len(CONTENT) @@ -1466,10 +1438,7 @@ def test_InitializeUpload_w_http_resumable_not_initialized_w_error(self): response = _makeResponse(http_client.FORBIDDEN) requester = _MakeRequest(response) - with _Monkey(MUT, - http_wrapper=_Dummy( - RESUME_INCOMPLETE=RESUME_INCOMPLETE, - MakeRequest=requester)): + with _Monkey(MUT, MakeRequest=requester): with self.assertRaises(HttpError): upload.InitializeUpload(request, http=object()) @@ -1477,7 +1446,6 @@ def test_InitializeUpload_w_http_wo_auto_transfer_w_OK(self): from six.moves import http_client from gcloud._testing import _Monkey from gcloud.streaming import transfer as MUT - from gcloud.streaming.http_wrapper import RESUME_INCOMPLETE from gcloud.streaming.transfer import RESUMABLE_UPLOAD CONTENT = b'ABCDEFGHIJ' LEN = len(CONTENT) @@ -1488,10 +1456,7 @@ def test_InitializeUpload_w_http_wo_auto_transfer_w_OK(self): response = _makeResponse(http_client.OK, info) requester = _MakeRequest(response) - with _Monkey(MUT, - http_wrapper=_Dummy( - RESUME_INCOMPLETE=RESUME_INCOMPLETE, - MakeRequest=requester)): + with _Monkey(MUT, MakeRequest=requester): upload.InitializeUpload(request, http=object()) self.assertEqual(upload._server_chunk_granularity, None) @@ -1504,7 +1469,6 @@ def test_InitializeUpload_w_client_w_auto_transfer_w_OK(self): from six.moves import http_client from gcloud._testing import _Monkey from gcloud.streaming import transfer as MUT - from gcloud.streaming.http_wrapper import RESUME_INCOMPLETE from gcloud.streaming.transfer import RESUMABLE_UPLOAD CONTENT = b'ABCDEFGHIJ' LEN = len(CONTENT) @@ -1521,11 +1485,9 @@ def test_InitializeUpload_w_client_w_auto_transfer_w_OK(self): requester = _MakeRequest(response, chunk_response) with _Monkey(MUT, - http_wrapper=_Dummy( - RESUME_INCOMPLETE=RESUME_INCOMPLETE, - Request=lambda url, http_method, body: + Request=lambda url, http_method, body: _Request(url, http_method, body), - MakeRequest=requester)): + MakeRequest=requester): upload.InitializeUpload(request, client=client) self.assertEqual(upload._server_chunk_granularity, 100) @@ -1651,11 +1613,9 @@ def test__StreamMedia_incomplete(self): requester = _MakeRequest(response_1, response_2) with _Monkey(MUT, - http_wrapper=_Dummy( - RESUME_INCOMPLETE=RESUME_INCOMPLETE, - 
Request=lambda url, http_method, body: + Request=lambda url, http_method, body: _Request(url, http_method, body), - MakeRequest=requester)): + MakeRequest=requester): response = upload._StreamMedia() self.assertEqual(len(requester._responses), 0) @@ -1699,11 +1659,9 @@ def test__StreamMedia_incomplete_w_transfer_error_w_addl_headers(self): requester = _MakeRequest(response) with _Monkey(MUT, - http_wrapper=_Dummy( - RESUME_INCOMPLETE=RESUME_INCOMPLETE, - Request=lambda url, http_method, body: + Request=lambda url, http_method, body: _Request(url, http_method, body), - MakeRequest=requester)): + MakeRequest=requester): with self.assertRaises(CommunicationError): upload._StreamMedia(additional_headers=headers) @@ -1766,10 +1724,7 @@ def test__SendMediaRequest_wo_error(self): response = _makeResponse(RESUME_INCOMPLETE, info) requester = _MakeRequest(response) - with _Monkey(MUT, - http_wrapper=_Dummy( - RESUME_INCOMPLETE=RESUME_INCOMPLETE, - MakeRequest=requester)): + with _Monkey(MUT, MakeRequest=requester): upload._SendMediaRequest(request, 9) self.assertEqual(len(requester._responses), 0) @@ -1805,10 +1760,8 @@ def test__SendMediaRequest_w_error(self): requester = _MakeRequest(response_1, response_2) with _Monkey(MUT, - http_wrapper=_Dummy( - RESUME_INCOMPLETE=RESUME_INCOMPLETE, - Request=_Request, - MakeRequest=requester)): + Request=_Request, + MakeRequest=requester): with self.assertRaises(HttpError): upload._SendMediaRequest(request, 9) diff --git a/gcloud/streaming/transfer.py b/gcloud/streaming/transfer.py index 0f8fc48c9a21..781997bd29df 100644 --- a/gcloud/streaming/transfer.py +++ b/gcloud/streaming/transfer.py @@ -10,11 +10,25 @@ import six from six.moves import http_client -from gcloud.streaming import buffered_stream -from gcloud.streaming import exceptions -from gcloud.streaming import http_wrapper -from gcloud.streaming import stream_slice -from gcloud.streaming import util +from gcloud.streaming.buffered_stream import BufferedStream +from gcloud.streaming.exceptions import CommunicationError +from gcloud.streaming.exceptions import ConfigurationValueError +from gcloud.streaming.exceptions import HttpError +from gcloud.streaming.exceptions import InvalidDataError +from gcloud.streaming.exceptions import InvalidUserInputError +from gcloud.streaming.exceptions import NotFoundError +from gcloud.streaming.exceptions import TransferInvalidError +from gcloud.streaming.exceptions import TransferRetryError +from gcloud.streaming.exceptions import UserError +from gcloud.streaming.http_wrapper import GetHttp +from gcloud.streaming.http_wrapper import ( + HandleExceptionsAndRebuildHttpConnections) +from gcloud.streaming.http_wrapper import MakeRequest +from gcloud.streaming.http_wrapper import Request +from gcloud.streaming.http_wrapper import RESUME_INCOMPLETE +from gcloud.streaming.stream_slice import StreamSlice +from gcloud.streaming.util import AcceptableMimeType +from gcloud.streaming.util import Typecheck __all__ = [ 'Download', @@ -49,7 +63,7 @@ def __init__(self, stream, close_stream=False, chunksize=None, self.num_retries = num_retries self.retry_func = ( - http_wrapper.HandleExceptionsAndRebuildHttpConnections) + HandleExceptionsAndRebuildHttpConnections) self.auto_transfer = auto_transfer self.chunksize = chunksize or 1048576 @@ -78,9 +92,9 @@ def num_retries(self): @num_retries.setter def num_retries(self, value): - util.Typecheck(value, six.integer_types) + Typecheck(value, six.integer_types) if value < 0: - raise exceptions.InvalidDataError( + raise InvalidDataError( 
'Cannot have negative value for num_retries') self.__num_retries = value @@ -108,7 +122,7 @@ def _Initialize(self, http, url): """ self.EnsureUninitialized() if self.http is None: - self.__http = http or http_wrapper.GetHttp() + self.__http = http or GetHttp() self.__url = url @property @@ -121,12 +135,12 @@ def _type_name(self): def EnsureInitialized(self): if not self.initialized: - raise exceptions.TransferInvalidError( + raise TransferInvalidError( 'Cannot use uninitialized %s', self._type_name) def EnsureUninitialized(self): if self.initialized: - raise exceptions.TransferInvalidError( + raise TransferInvalidError( 'Cannot re-initialize %s', self._type_name) def __del__(self): @@ -161,7 +175,7 @@ def FromFile(cls, filename, overwrite=False, auto_transfer=True, **kwds): """Create a new download object from a filename.""" path = os.path.expanduser(filename) if os.path.exists(path) and not overwrite: - raise exceptions.InvalidUserInputError( + raise InvalidUserInputError( 'File %s exists and overwrite not specified' % path) return cls(open(path, 'wb'), close_stream=True, auto_transfer=auto_transfer, **kwds) @@ -223,7 +237,7 @@ def InitializeDownload(self, http_request, http=None, client=None): """ self.EnsureUninitialized() if http is None and client is None: - raise exceptions.UserError('Must provide client or http.') + raise UserError('Must provide client or http.') http = http or client.http if client is not None: http_request.url = client.FinalizeTransferUrl(http_request.url) @@ -231,10 +245,10 @@ def InitializeDownload(self, http_request, http=None, client=None): if self.auto_transfer: end_byte = self._ComputeEndByte(0) self._SetRangeHeader(http_request, 0, end_byte) - response = http_wrapper.MakeRequest( + response = MakeRequest( self.bytes_http or http, http_request) if response.status_code not in self._ACCEPTABLE_STATUSES: - raise exceptions.HttpError.FromResponse(response) + raise HttpError.FromResponse(response) self._initial_response = response self._SetTotal(response.info) url = response.info.get('content-location', response.request_url) @@ -249,14 +263,14 @@ def InitializeDownload(self, http_request, http=None, client=None): def _NormalizeStartEnd(self, start, end=None): if end is not None: if start < 0: - raise exceptions.TransferInvalidError( + raise TransferInvalidError( 'Cannot have end index with negative start index') elif start >= self.total_size: - raise exceptions.TransferInvalidError( + raise TransferInvalidError( 'Cannot have start index greater than total size') end = min(end, self.total_size - 1) if end < start: - raise exceptions.TransferInvalidError( + raise TransferInvalidError( 'Range requested with end[%s] < start[%s]' % (end, start)) return start, end else: @@ -318,11 +332,11 @@ def _ComputeEndByte(self, start, end=None, use_chunks=True): def _GetChunk(self, start, end, additional_headers=None): """Retrieve a chunk, and return the full response.""" self.EnsureInitialized() - request = http_wrapper.Request(url=self.url) + request = Request(url=self.url) self._SetRangeHeader(request, start, end=end) if additional_headers is not None: request.headers.update(additional_headers) - return http_wrapper.MakeRequest( + return MakeRequest( self.bytes_http, request, retry_func=self.retry_func, retries=self.num_retries) @@ -333,9 +347,9 @@ def _ProcessResponse(self, response): # up the transfer versus something we should attempt again. 
if response.status_code in (http_client.FORBIDDEN, http_client.NOT_FOUND): - raise exceptions.HttpError.FromResponse(response) + raise HttpError.FromResponse(response) else: - raise exceptions.TransferRetryError(response.content) + raise TransferRetryError(response.content) if response.status_code in (http_client.OK, http_client.PARTIAL_CONTENT): self.stream.write(response.content) @@ -395,7 +409,7 @@ def GetRange(self, start, end=None, additional_headers=None, response = self._ProcessResponse(response) progress += response.length if response.length == 0: - raise exceptions.TransferRetryError( + raise TransferRetryError( 'Zero bytes unexpectedly returned in download response') def StreamInChunks(self, additional_headers=None): @@ -470,11 +484,11 @@ def FromFile(cls, filename, mime_type=None, auto_transfer=True, **kwds): """Create a new Upload object from a filename.""" path = os.path.expanduser(filename) if not os.path.exists(path): - raise exceptions.NotFoundError('Could not find file %s' % path) + raise NotFoundError('Could not find file %s' % path) if not mime_type: mime_type, _ = mimetypes.guess_type(path) if mime_type is None: - raise exceptions.InvalidUserInputError( + raise InvalidUserInputError( 'Could not guess mime type for %s' % path) size = os.stat(path).st_size return cls(open(path, 'rb'), mime_type, total_size=size, @@ -485,7 +499,7 @@ def FromStream(cls, stream, mime_type, total_size=None, auto_transfer=True, **kwds): """Create a new Upload object from a stream.""" if mime_type is None: - raise exceptions.InvalidUserInputError( + raise InvalidUserInputError( 'No mime_type specified for stream') return cls(stream, mime_type, total_size=total_size, close_stream=False, auto_transfer=auto_transfer, **kwds) @@ -509,7 +523,7 @@ def strategy(self): @strategy.setter def strategy(self, value): if value not in (SIMPLE_UPLOAD, RESUMABLE_UPLOAD): - raise exceptions.UserError(( + raise UserError(( 'Invalid value "%s" for upload strategy, must be one of ' '"simple" or "resumable".') % value) self.__strategy = value @@ -564,12 +578,12 @@ def ConfigureRequest(self, upload_config, http_request, url_builder): # Validate total_size vs. max_size if (self.total_size and upload_config.max_size and self.total_size > upload_config.max_size): - raise exceptions.InvalidUserInputError( + raise InvalidUserInputError( 'Upload too big: %s larger than max size %s' % ( self.total_size, upload_config.max_size)) # Validate mime type - if not util.AcceptableMimeType(upload_config.accept, self.mime_type): - raise exceptions.InvalidUserInputError( + if not AcceptableMimeType(upload_config.accept, self.mime_type): + raise InvalidUserInputError( 'MIME type %s does not match any accepted MIME ranges %s' % ( self.mime_type, upload_config.accept)) @@ -655,10 +669,10 @@ def RefreshResumableUploadState(self): # http://tools.ietf.org/html/rfc7231#section-4.3.4 # The violation is documented: # https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#resume-upload - refresh_request = http_wrapper.Request( + refresh_request = Request( url=self.url, http_method='PUT', headers={'Content-Range': 'bytes */*'}) - refresh_response = http_wrapper.MakeRequest( + refresh_response = MakeRequest( self.http, refresh_request, redirections=0, retries=self.num_retries) range_header = self._GetRangeHeaderFromResponse(refresh_response) @@ -671,14 +685,14 @@ def RefreshResumableUploadState(self): # originally requested. Cache it so it can be returned in # StreamInChunks. 
self._final_response = refresh_response - elif refresh_response.status_code == http_wrapper.RESUME_INCOMPLETE: + elif refresh_response.status_code == RESUME_INCOMPLETE: if range_header is None: self.__progress = 0 else: self.__progress = self._GetLastByte(range_header) + 1 self.stream.seek(self.progress) else: - raise exceptions.HttpError.FromResponse(refresh_response) + raise HttpError.FromResponse(refresh_response) def _GetRangeHeaderFromResponse(self, response): # XXX Per RFC 2616/7233, 'Range' is a request header, not a response @@ -695,20 +709,20 @@ def _GetRangeHeaderFromResponse(self, response): def InitializeUpload(self, http_request, http=None, client=None): """Initialize this upload from the given http_request.""" if self.strategy is None: - raise exceptions.UserError( + raise UserError( 'No upload strategy set; did you call ConfigureRequest?') if http is None and client is None: - raise exceptions.UserError('Must provide client or http.') + raise UserError('Must provide client or http.') if self.strategy != RESUMABLE_UPLOAD: return http = http or client.http if client is not None: http_request.url = client.FinalizeTransferUrl(http_request.url) self.EnsureUninitialized() - http_response = http_wrapper.MakeRequest(http, http_request, + http_response = MakeRequest(http, http_request, retries=self.num_retries) if http_response.status_code != http_client.OK: - raise exceptions.HttpError.FromResponse(http_response) + raise HttpError.FromResponse(http_response) # XXX when is this getting converted to an integer? granularity = http_response.info.get('X-Goog-Upload-Chunk-Granularity') @@ -737,14 +751,14 @@ def _ValidateChunksize(self, chunksize=None): return chunksize = chunksize or self.chunksize if chunksize % self._server_chunk_granularity: - raise exceptions.ConfigurationValueError( + raise ConfigurationValueError( 'Server requires chunksize to be a multiple of %d', self._server_chunk_granularity) def _StreamMedia(self, additional_headers=None, use_chunks=True): """Helper function for StreamMedia / StreamInChunks.""" if self.strategy != RESUMABLE_UPLOAD: - raise exceptions.InvalidUserInputError( + raise InvalidUserInputError( 'Cannot stream non-resumable upload') # final_response is set if we resumed an already-completed upload. response = self._final_response @@ -761,7 +775,7 @@ def _StreamMedia(self, additional_headers=None, use_chunks=True): self.__progress = self._GetLastByte(response.info['range']) if self.progress + 1 != self.stream.tell(): # TODO(craigcitro): Add a better way to recover here. 
- raise exceptions.CommunicationError( + raise CommunicationError( 'Failed to transfer all bytes in chunk, upload paused at ' 'byte %d' % self.progress) if self.complete and hasattr(self.stream, 'seek'): @@ -770,7 +784,7 @@ def _StreamMedia(self, additional_headers=None, use_chunks=True): end_pos = self.stream.tell() self.stream.seek(current_pos) if current_pos != end_pos: - raise exceptions.TransferInvalidError( + raise TransferInvalidError( 'Upload complete with %s additional bytes left in stream' % (int(end_pos) - int(current_pos))) return response @@ -794,16 +808,16 @@ def StreamInChunks(self, additional_headers=None): def _SendMediaRequest(self, request, end): """Request helper function for SendMediaBody & SendChunk.""" - response = http_wrapper.MakeRequest( + response = MakeRequest( self.bytes_http, request, retry_func=self.retry_func, retries=self.num_retries) if response.status_code not in (http_client.OK, http_client.CREATED, - http_wrapper.RESUME_INCOMPLETE): + RESUME_INCOMPLETE): # We want to reset our state to wherever the server left us # before this failed request, and then raise. self.RefreshResumableUploadState() - raise exceptions.HttpError.FromResponse(response) - if response.status_code == http_wrapper.RESUME_INCOMPLETE: + raise HttpError.FromResponse(response) + if response.status_code == RESUME_INCOMPLETE: last_byte = self._GetLastByte( self._GetRangeHeaderFromResponse(response)) if last_byte + 1 != end: @@ -814,12 +828,11 @@ def _SendMediaBody(self, start, additional_headers=None): """Send the entire media stream in a single request.""" self.EnsureInitialized() if self.total_size is None: - raise exceptions.TransferInvalidError( + raise TransferInvalidError( 'Total size must be known for SendMediaBody') - body_stream = stream_slice.StreamSlice( - self.stream, self.total_size - start) + body_stream = StreamSlice(self.stream, self.total_size - start) - request = http_wrapper.Request(url=self.url, http_method='PUT', + request = Request(url=self.url, http_method='PUT', body=body_stream) request.headers['Content-Type'] = self.mime_type if start == self.total_size: @@ -842,7 +855,7 @@ def _SendChunk(self, start, additional_headers=None): if self.total_size is None: # For the streaming resumable case, we need to detect when # we're at the end of the stream. - body_stream = buffered_stream.BufferedStream( + body_stream = BufferedStream( self.stream, start, self.chunksize) end = body_stream.stream_end_position if body_stream.stream_exhausted: @@ -855,10 +868,10 @@ def _SendChunk(self, start, additional_headers=None): body_stream = body_stream.read(self.chunksize) else: end = min(start + self.chunksize, self.total_size) - body_stream = stream_slice.StreamSlice(self.stream, end - start) + body_stream = StreamSlice(self.stream, end - start) # TODO(craigcitro): Think about clearer errors on "no data in # stream". 
- request = http_wrapper.Request(url=self.url, http_method='PUT', + request = Request(url=self.url, http_method='PUT', body=body_stream) request.headers['Content-Type'] = self.mime_type if no_log_body: diff --git a/gcloud/streaming/util.py b/gcloud/streaming/util.py index 1e07d7b90d84..fb8503986e9d 100644 --- a/gcloud/streaming/util.py +++ b/gcloud/streaming/util.py @@ -3,14 +3,14 @@ Pruned to include only helpers used by other vendored-in modules: -``gcloud._apidools.transfer`` uses: +:mod:`gcloud.streaming.http_wrapper` uses: -- Typecheck -- AcceptableMimeType +- :function:`CalculateWaitForRetry` -``gcloud.streaming.http_wrapper`` uses: +:mod:`gcloud._apidools.transfer` uses: -- CalculateWaitForRetry +- :function:`Typecheck` +- :function:`AcceptableMimeType` """ import random From 37f9ed37fd1c1e7d981d3767ef3f10bcd80e470d Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 28 Oct 2015 15:51:04 -0400 Subject: [PATCH 03/17] Give 'streaming.util' functions PEP8 names. --- gcloud/streaming/http_wrapper.py | 4 ++-- gcloud/streaming/test_util.py | 18 +++++++++--------- gcloud/streaming/transfer.py | 8 ++++---- gcloud/streaming/util.py | 22 ++++++++++++---------- 4 files changed, 27 insertions(+), 25 deletions(-) diff --git a/gcloud/streaming/http_wrapper.py b/gcloud/streaming/http_wrapper.py index 9134355db588..330d5a7e3aa6 100644 --- a/gcloud/streaming/http_wrapper.py +++ b/gcloud/streaming/http_wrapper.py @@ -33,7 +33,7 @@ from gcloud.streaming.exceptions import BadStatusCodeError from gcloud.streaming.exceptions import RequestError from gcloud.streaming.exceptions import RetryAfterError -from gcloud.streaming.util import CalculateWaitForRetry +from gcloud.streaming.util import calculate_wait_for_retry __all__ = [ 'CheckResponse', @@ -286,7 +286,7 @@ def HandleExceptionsAndRebuildHttpConnections(retry_args): logging.debug('Retrying request to url %s after exception %s', retry_args.http_request.url, retry_args.exc) time.sleep( - retry_after or CalculateWaitForRetry( + retry_after or calculate_wait_for_retry( retry_args.num_retries, max_wait=retry_args.max_retry_wait)) diff --git a/gcloud/streaming/test_util.py b/gcloud/streaming/test_util.py index 3930eb2decf4..ec3bc0e53de5 100644 --- a/gcloud/streaming/test_util.py +++ b/gcloud/streaming/test_util.py @@ -2,11 +2,11 @@ import unittest2 -class Test_TypeCheck(unittest2.TestCase): +class Test_type_check(unittest2.TestCase): def _callFUT(self, *args, **kw): - from gcloud.streaming.util import Typecheck - return Typecheck(*args, **kw) + from gcloud.streaming.util import type_check + return type_check(*args, **kw) def test_pass(self): self.assertEqual(self._callFUT(123, int), 123) @@ -28,11 +28,11 @@ def test_fail_w_tuple_no_msg(self): self._callFUT(123, (list, tuple)) -class Test_CalculateWaitForRetry(unittest2.TestCase): +class Test_calculate_wait_for_retry(unittest2.TestCase): def _callFUT(self, *args, **kw): - from gcloud.streaming.util import CalculateWaitForRetry - return CalculateWaitForRetry(*args, **kw) + from gcloud.streaming.util import calculate_wait_for_retry + return calculate_wait_for_retry(*args, **kw) def test_w_negative_jitter_lt_max_wait(self): import random @@ -47,11 +47,11 @@ def test_w_positive_jitter_gt_max_wait(self): self.assertEqual(self._callFUT(4, 10), 10) -class Test_AcceptableMimeType(unittest2.TestCase): +class Test_acceptable_mime_type(unittest2.TestCase): def _callFUT(self, *args, **kw): - from gcloud.streaming.util import AcceptableMimeType - return AcceptableMimeType(*args, **kw) + from gcloud.streaming.util 
import acceptable_mime_type + return acceptable_mime_type(*args, **kw) def test_pattern_wo_slash(self): from gcloud.streaming.exceptions import InvalidUserInputError diff --git a/gcloud/streaming/transfer.py b/gcloud/streaming/transfer.py index 781997bd29df..8dce34725181 100644 --- a/gcloud/streaming/transfer.py +++ b/gcloud/streaming/transfer.py @@ -27,8 +27,8 @@ from gcloud.streaming.http_wrapper import Request from gcloud.streaming.http_wrapper import RESUME_INCOMPLETE from gcloud.streaming.stream_slice import StreamSlice -from gcloud.streaming.util import AcceptableMimeType -from gcloud.streaming.util import Typecheck +from gcloud.streaming.util import acceptable_mime_type +from gcloud.streaming.util import type_check __all__ = [ 'Download', @@ -92,7 +92,7 @@ def num_retries(self): @num_retries.setter def num_retries(self, value): - Typecheck(value, six.integer_types) + type_check(value, six.integer_types) if value < 0: raise InvalidDataError( 'Cannot have negative value for num_retries') @@ -582,7 +582,7 @@ def ConfigureRequest(self, upload_config, http_request, url_builder): 'Upload too big: %s larger than max size %s' % ( self.total_size, upload_config.max_size)) # Validate mime type - if not AcceptableMimeType(upload_config.accept, self.mime_type): + if not acceptable_mime_type(upload_config.accept, self.mime_type): raise InvalidUserInputError( 'MIME type %s does not match any accepted MIME ranges %s' % ( self.mime_type, upload_config.accept)) diff --git a/gcloud/streaming/util.py b/gcloud/streaming/util.py index fb8503986e9d..9ab423aa5cbf 100644 --- a/gcloud/streaming/util.py +++ b/gcloud/streaming/util.py @@ -5,20 +5,22 @@ :mod:`gcloud.streaming.http_wrapper` uses: -- :function:`CalculateWaitForRetry` +- :function:`calculate_wait_for_retry` :mod:`gcloud._apidools.transfer` uses: -- :function:`Typecheck` -- :function:`AcceptableMimeType` +- :function:`type_check` +- :function:`acceptable_mime_type` """ import random -from gcloud.streaming import exceptions +from gcloud.streaming.exceptions import GeneratedClientError +from gcloud.streaming.exceptions import InvalidUserInputError +from gcloud.streaming.exceptions import TypecheckError -def Typecheck(arg, arg_type, msg=None): +def type_check(arg, arg_type, msg=None): if not isinstance(arg, arg_type): if msg is None: if isinstance(arg_type, tuple): @@ -26,11 +28,11 @@ def Typecheck(arg, arg_type, msg=None): type(arg), arg_type) else: msg = 'Type of arg is "%s", not "%s"' % (type(arg), arg_type) - raise exceptions.TypecheckError(msg) + raise TypecheckError(msg) return arg -def CalculateWaitForRetry(retry_attempt, max_wait=60): +def calculate_wait_for_retry(retry_attempt, max_wait=60): """Calculates amount of time to wait before a retry attempt. Wait time grows exponentially with the number of attempts. A @@ -52,7 +54,7 @@ def CalculateWaitForRetry(retry_attempt, max_wait=60): return max(1, min(wait_time, max_wait)) -def AcceptableMimeType(accept_patterns, mime_type): +def acceptable_mime_type(accept_patterns, mime_type): """Return True iff mime_type is acceptable for one of accept_patterns. Note that this function assumes that all patterns in accept_patterns @@ -68,11 +70,11 @@ def AcceptableMimeType(accept_patterns, mime_type): Whether or not mime_type matches (at least) one of these patterns. 
""" if '/' not in mime_type: - raise exceptions.InvalidUserInputError( + raise InvalidUserInputError( 'Invalid MIME type: "%s"' % mime_type) unsupported_patterns = [p for p in accept_patterns if ';' in p] if unsupported_patterns: - raise exceptions.GeneratedClientError( + raise GeneratedClientError( 'MIME patterns with parameter unsupported: "%s"' % ', '.join( unsupported_patterns)) From beb8b84a146a6983ac2ee9f40f0c19d182437600 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 28 Oct 2015 16:04:28 -0400 Subject: [PATCH 04/17] Give 'streaming.http_wrapper' functions PEP8 names. --- gcloud/storage/blob.py | 6 ++-- gcloud/streaming/http_wrapper.py | 22 ++++++------ gcloud/streaming/test_http_wrapper.py | 18 +++++----- gcloud/streaming/test_transfer.py | 50 +++++++++++++-------------- gcloud/streaming/transfer.py | 22 ++++++------ 5 files changed, 58 insertions(+), 60 deletions(-) diff --git a/gcloud/storage/blob.py b/gcloud/storage/blob.py index 0671285ccf87..24dfcb599095 100644 --- a/gcloud/storage/blob.py +++ b/gcloud/storage/blob.py @@ -35,7 +35,7 @@ from gcloud.storage._helpers import _scalar_property from gcloud.storage.acl import ObjectACL from gcloud.streaming.http_wrapper import Request -from gcloud.streaming.http_wrapper import MakeRequest +from gcloud.streaming.http_wrapper import make_api_request _API_ACCESS_ENDPOINT = 'https://storage.googleapis.com' @@ -411,8 +411,8 @@ def upload_from_file(self, file_obj, rewind=False, size=None, if upload.strategy == transfer.RESUMABLE_UPLOAD: http_response = upload.StreamInChunks() else: - http_response = MakeRequest(connection.http, request, - retries=num_retries) + http_response = make_api_request(connection.http, request, + retries=num_retries) response_content = http_response.content if not isinstance(response_content, six.string_types): # pragma: NO COVER Python3 diff --git a/gcloud/streaming/http_wrapper.py b/gcloud/streaming/http_wrapper.py index 330d5a7e3aa6..6b2dd7b021d8 100644 --- a/gcloud/streaming/http_wrapper.py +++ b/gcloud/streaming/http_wrapper.py @@ -8,13 +8,13 @@ :mod:`gcloud.storage.blob` uses: - :class:`Request` -- :function:`MakeRequest` +- :function:`make_api_request` :mod:`gcloud._apidools.transfer` uses: -- :function:`GetHttp` -- :function:`HandleExceptionsAndRebuildHttpConnections` -- :function:`MakeRequest` +- :function:`get_http` +- :function:`handle_http_exceptions` +- :function:`make_api_request` - :class:`Request` - :const:`RESUME_INCOMPLETE` """ @@ -37,9 +37,9 @@ __all__ = [ 'CheckResponse', - 'GetHttp', - 'HandleExceptionsAndRebuildHttpConnections', - 'MakeRequest', + 'get_http', + 'handle_http_exceptions', + 'make_api_request', 'RebuildHttpConnections', 'Request', 'Response', @@ -236,7 +236,7 @@ def RebuildHttpConnections(http): del http.connections[conn_key] -def HandleExceptionsAndRebuildHttpConnections(retry_args): +def handle_http_exceptions(retry_args): """Exception handler for http failures. This catches known failures and rebuilds the underlying HTTP connections. @@ -337,9 +337,9 @@ def _MakeRequestNoRetry(http, http_request, redirections=5, return response -def MakeRequest(http, http_request, retries=7, max_retry_wait=60, +def make_api_request(http, http_request, retries=7, max_retry_wait=60, redirections=5, - retry_func=HandleExceptionsAndRebuildHttpConnections, + retry_func=handle_http_exceptions, check_response_func=CheckResponse, wo_retry_func=_MakeRequestNoRetry): """Send http_request via the given http, performing error/retry handling. 
@@ -391,7 +391,7 @@ def _RegisterHttpFactory(factory): _HTTP_FACTORIES.append(factory) -def GetHttp(**kwds): +def get_http(**kwds): for factory in _HTTP_FACTORIES: http = factory(**kwds) if http is not None: diff --git a/gcloud/streaming/test_http_wrapper.py b/gcloud/streaming/test_http_wrapper.py index ea7cf6aa6b74..0bf493ea90fb 100644 --- a/gcloud/streaming/test_http_wrapper.py +++ b/gcloud/streaming/test_http_wrapper.py @@ -255,13 +255,13 @@ def test_w_connections(self): self.assertTrue('skip_me' in connections) -class Test_HandleExceptionsAndRebuildHttpConnections(unittest2.TestCase): +class Test_handle_http_exceptions(unittest2.TestCase): URL = 'http://example.com/api' def _callFUT(self, *args, **kw): from gcloud.streaming.http_wrapper import ( - HandleExceptionsAndRebuildHttpConnections) - return HandleExceptionsAndRebuildHttpConnections(*args, **kw) + handle_http_exceptions) + return handle_http_exceptions(*args, **kw) def _monkeyMUT(self): from gcloud._testing import _Monkey @@ -612,11 +612,11 @@ def test_w_request_returning_None(self): self._verify_requested(_http, _request, connection_type=CONN_TYPE) -class Test_MakeRequest(unittest2.TestCase): +class Test_make_api_request(unittest2.TestCase): def _callFUT(self, *args, **kw): - from gcloud.streaming.http_wrapper import MakeRequest - return MakeRequest(*args, **kw) + from gcloud.streaming.http_wrapper import make_api_request + return make_api_request(*args, **kw) def test_wo_exception(self): HTTP, REQUEST, RESPONSE = object(), object(), object() @@ -739,11 +739,11 @@ def test_it(self): self.assertEqual(_factories, [FACTORY]) -class Test_GetHttp(unittest2.TestCase): +class Test_get_http(unittest2.TestCase): def _callFUT(self, *args, **kw): - from gcloud.streaming.http_wrapper import GetHttp - return GetHttp(*args, **kw) + from gcloud.streaming.http_wrapper import get_http + return get_http(*args, **kw) def test_wo_registered_factories(self): from httplib2 import Http diff --git a/gcloud/streaming/test_transfer.py b/gcloud/streaming/test_transfer.py index 39924dd780da..49addf608694 100644 --- a/gcloud/streaming/test_transfer.py +++ b/gcloud/streaming/test_transfer.py @@ -307,7 +307,7 @@ def test_InitializeDownload_w_autotransfer_failing(self): requester = _MakeRequest(response) with _Monkey(MUT, - MakeRequest=requester): + make_api_request=requester): with self.assertRaises(HttpError): download.InitializeDownload(request, http) @@ -327,7 +327,7 @@ def test_InitializeDownload_w_autotransfer_w_content_location(self): response = _makeResponse(http_client.NO_CONTENT, info) requester = _MakeRequest(response) - with _Monkey(MUT, MakeRequest=requester): + with _Monkey(MUT, make_api_request=requester): download.InitializeDownload(request, http) self.assertTrue(download._initial_response is None) @@ -451,7 +451,7 @@ def test__GetChunk_wo_additional_headers(self): with _Monkey(MUT, Request=lambda url: request, - MakeRequest=requester): + make_api_request=requester): found = download._GetChunk(0, 10) self.assertTrue(found is response) @@ -473,7 +473,7 @@ def test__GetChunk_w_additional_headers(self): with _Monkey(MUT, Request=lambda url: request, - MakeRequest=requester): + make_api_request=requester): found = download._GetChunk(0, 10, additional_headers=headers) self.assertTrue(found is response) @@ -579,7 +579,7 @@ def test_GetRange_wo_total_size_complete(self): with _Monkey(MUT, Request=lambda url: request, - MakeRequest=requester): + make_api_request=requester): download.GetRange(0, LEN) self.assertTrue(len(requester._requested), 1) @@ 
-609,7 +609,7 @@ def test_GetRange_wo_total_size_wo_end(self): with _Monkey(MUT, Request=lambda url: request, - MakeRequest=requester): + make_api_request=requester): download.GetRange(START) self.assertTrue(len(requester._requested), 1) @@ -641,7 +641,7 @@ def test_GetRange_w_total_size_partial_w_additional_headers(self): with _Monkey(MUT, Request=lambda url: request, - MakeRequest=requester): + make_api_request=requester): download.GetRange(0, PARTIAL_LEN, additional_headers=headers) self.assertTrue(len(requester._requested), 1) @@ -672,7 +672,7 @@ def test_GetRange_w_empty_chunk(self): with _Monkey(MUT, Request=lambda url: request, - MakeRequest=requester): + make_api_request=requester): with self.assertRaises(TransferRetryError): download.GetRange(START) @@ -702,7 +702,7 @@ def test_GetRange_w_total_size_wo_use_chunks(self): with _Monkey(MUT, Request=lambda url: request, - MakeRequest=requester): + make_api_request=requester): download.GetRange(0, use_chunks=False) self.assertTrue(len(requester._requested), 1) @@ -738,7 +738,7 @@ def test_GetRange_w_multiple_chunks(self): with _Monkey(MUT, Request=lambda url: _requests.pop(0), - MakeRequest=requester): + make_api_request=requester): download.GetRange(0) self.assertTrue(len(requester._requested), 2) @@ -828,7 +828,7 @@ def test_StreamMedia_w_initial_response_incomplete(self): with _Monkey(MUT, Request=lambda url: request, - MakeRequest=requester): + make_api_request=requester): download.StreamMedia() self.assertTrue(len(requester._requested), 1) @@ -859,7 +859,7 @@ def test_StreamMedia_wo_initial_response_wo_total_size(self): with _Monkey(MUT, Request=lambda url: request, - MakeRequest=requester): + make_api_request=requester): download.StreamMedia() self.assertTrue(len(requester._requested), 1) @@ -890,7 +890,7 @@ def test_StreamMedia_wo_initial_response_w_addl_headers_wo_chunks(self): with _Monkey(MUT, Request=lambda url: request, - MakeRequest=requester): + make_api_request=requester): download.StreamMedia(additional_headers=headers, use_chunks=False) self.assertTrue(len(requester._requested), 1) @@ -1266,7 +1266,7 @@ def test_RefreshResumableUploadState_w_OK(self): with _Monkey(MUT, Request=_Request, - MakeRequest=requester): + make_api_request=requester): upload.RefreshResumableUploadState() self.assertTrue(upload.complete) @@ -1293,7 +1293,7 @@ def test_RefreshResumableUploadState_w_CREATED(self): with _Monkey(MUT, Request=_Request, - MakeRequest=requester): + make_api_request=requester): upload.RefreshResumableUploadState() self.assertTrue(upload.complete) @@ -1320,7 +1320,7 @@ def test_RefreshResumableUploadState_w_RESUME_INCOMPLETE_w_range(self): with _Monkey(MUT, Request=_Request, - MakeRequest=requester): + make_api_request=requester): upload.RefreshResumableUploadState() self.assertFalse(upload.complete) @@ -1346,7 +1346,7 @@ def test_RefreshResumableUploadState_w_RESUME_INCOMPLETE_wo_range(self): with _Monkey(MUT, Request=_Request, - MakeRequest=requester): + make_api_request=requester): upload.RefreshResumableUploadState() self.assertFalse(upload.complete) @@ -1372,7 +1372,7 @@ def test_RefreshResumableUploadState_w_error(self): with _Monkey(MUT, Request=_Request, - MakeRequest=requester): + make_api_request=requester): with self.assertRaises(HttpError): upload.RefreshResumableUploadState() @@ -1438,7 +1438,7 @@ def test_InitializeUpload_w_http_resumable_not_initialized_w_error(self): response = _makeResponse(http_client.FORBIDDEN) requester = _MakeRequest(response) - with _Monkey(MUT, MakeRequest=requester): + with 
_Monkey(MUT, make_api_request=requester): with self.assertRaises(HttpError): upload.InitializeUpload(request, http=object()) @@ -1456,7 +1456,7 @@ def test_InitializeUpload_w_http_wo_auto_transfer_w_OK(self): response = _makeResponse(http_client.OK, info) requester = _MakeRequest(response) - with _Monkey(MUT, MakeRequest=requester): + with _Monkey(MUT, make_api_request=requester): upload.InitializeUpload(request, http=object()) self.assertEqual(upload._server_chunk_granularity, None) @@ -1487,7 +1487,7 @@ def test_InitializeUpload_w_client_w_auto_transfer_w_OK(self): with _Monkey(MUT, Request=lambda url, http_method, body: _Request(url, http_method, body), - MakeRequest=requester): + make_api_request=requester): upload.InitializeUpload(request, client=client) self.assertEqual(upload._server_chunk_granularity, 100) @@ -1615,7 +1615,7 @@ def test__StreamMedia_incomplete(self): with _Monkey(MUT, Request=lambda url, http_method, body: _Request(url, http_method, body), - MakeRequest=requester): + make_api_request=requester): response = upload._StreamMedia() self.assertEqual(len(requester._responses), 0) @@ -1661,7 +1661,7 @@ def test__StreamMedia_incomplete_w_transfer_error_w_addl_headers(self): with _Monkey(MUT, Request=lambda url, http_method, body: _Request(url, http_method, body), - MakeRequest=requester): + make_api_request=requester): with self.assertRaises(CommunicationError): upload._StreamMedia(additional_headers=headers) @@ -1724,7 +1724,7 @@ def test__SendMediaRequest_wo_error(self): response = _makeResponse(RESUME_INCOMPLETE, info) requester = _MakeRequest(response) - with _Monkey(MUT, MakeRequest=requester): + with _Monkey(MUT, make_api_request=requester): upload._SendMediaRequest(request, 9) self.assertEqual(len(requester._responses), 0) @@ -1761,7 +1761,7 @@ def test__SendMediaRequest_w_error(self): with _Monkey(MUT, Request=_Request, - MakeRequest=requester): + make_api_request=requester): with self.assertRaises(HttpError): upload._SendMediaRequest(request, 9) diff --git a/gcloud/streaming/transfer.py b/gcloud/streaming/transfer.py index 8dce34725181..472f7f9ec171 100644 --- a/gcloud/streaming/transfer.py +++ b/gcloud/streaming/transfer.py @@ -20,10 +20,9 @@ from gcloud.streaming.exceptions import TransferInvalidError from gcloud.streaming.exceptions import TransferRetryError from gcloud.streaming.exceptions import UserError -from gcloud.streaming.http_wrapper import GetHttp -from gcloud.streaming.http_wrapper import ( - HandleExceptionsAndRebuildHttpConnections) -from gcloud.streaming.http_wrapper import MakeRequest +from gcloud.streaming.http_wrapper import get_http +from gcloud.streaming.http_wrapper import handle_http_exceptions +from gcloud.streaming.http_wrapper import make_api_request from gcloud.streaming.http_wrapper import Request from gcloud.streaming.http_wrapper import RESUME_INCOMPLETE from gcloud.streaming.stream_slice import StreamSlice @@ -62,8 +61,7 @@ def __init__(self, stream, close_stream=False, chunksize=None, # Let the @property do validation self.num_retries = num_retries - self.retry_func = ( - HandleExceptionsAndRebuildHttpConnections) + self.retry_func = handle_http_exceptions self.auto_transfer = auto_transfer self.chunksize = chunksize or 1048576 @@ -122,7 +120,7 @@ def _Initialize(self, http, url): """ self.EnsureUninitialized() if self.http is None: - self.__http = http or GetHttp() + self.__http = http or get_http() self.__url = url @property @@ -245,7 +243,7 @@ def InitializeDownload(self, http_request, http=None, client=None): if 
self.auto_transfer: end_byte = self._ComputeEndByte(0) self._SetRangeHeader(http_request, 0, end_byte) - response = MakeRequest( + response = make_api_request( self.bytes_http or http, http_request) if response.status_code not in self._ACCEPTABLE_STATUSES: raise HttpError.FromResponse(response) @@ -336,7 +334,7 @@ def _GetChunk(self, start, end, additional_headers=None): self._SetRangeHeader(request, start, end=end) if additional_headers is not None: request.headers.update(additional_headers) - return MakeRequest( + return make_api_request( self.bytes_http, request, retry_func=self.retry_func, retries=self.num_retries) @@ -672,7 +670,7 @@ def RefreshResumableUploadState(self): refresh_request = Request( url=self.url, http_method='PUT', headers={'Content-Range': 'bytes */*'}) - refresh_response = MakeRequest( + refresh_response = make_api_request( self.http, refresh_request, redirections=0, retries=self.num_retries) range_header = self._GetRangeHeaderFromResponse(refresh_response) @@ -719,7 +717,7 @@ def InitializeUpload(self, http_request, http=None, client=None): if client is not None: http_request.url = client.FinalizeTransferUrl(http_request.url) self.EnsureUninitialized() - http_response = MakeRequest(http, http_request, + http_response = make_api_request(http, http_request, retries=self.num_retries) if http_response.status_code != http_client.OK: raise HttpError.FromResponse(http_response) @@ -808,7 +806,7 @@ def StreamInChunks(self, additional_headers=None): def _SendMediaRequest(self, request, end): """Request helper function for SendMediaBody & SendChunk.""" - response = MakeRequest( + response = make_api_request( self.bytes_http, request, retry_func=self.retry_func, retries=self.num_retries) if response.status_code not in (http_client.OK, http_client.CREATED, From b6b764d2935f048082d1a18708a655d2b3ec7834 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 28 Oct 2015 16:09:20 -0400 Subject: [PATCH 05/17] Further import cleanup / documentation. --- gcloud/storage/blob.py | 14 +++++++------- gcloud/streaming/transfer.py | 13 ++++++++++++- 2 files changed, 19 insertions(+), 8 deletions(-) diff --git a/gcloud/storage/blob.py b/gcloud/storage/blob.py index 24dfcb599095..2b9abf4312e7 100644 --- a/gcloud/storage/blob.py +++ b/gcloud/storage/blob.py @@ -25,8 +25,6 @@ import six from six.moves.urllib.parse import quote # pylint: disable=F0401 -from gcloud.streaming import transfer - from gcloud._helpers import _RFC3339_MICROS from gcloud._helpers import UTC from gcloud.credentials import generate_signed_url @@ -36,6 +34,9 @@ from gcloud.storage.acl import ObjectACL from gcloud.streaming.http_wrapper import Request from gcloud.streaming.http_wrapper import make_api_request +from gcloud.streaming.transfer import Download +from gcloud.streaming.transfer import RESUMABLE_UPLOAD +from gcloud.streaming.transfer import Upload _API_ACCESS_ENDPOINT = 'https://storage.googleapis.com' @@ -259,7 +260,7 @@ def download_to_file(self, file_obj, client=None): download_url = self.media_link # Use apitools 'Download' facility. 
- download = transfer.Download.FromStream(file_obj, auto_transfer=False) + download = Download.FromStream(file_obj, auto_transfer=False) headers = {} if self.chunk_size is not None: download.chunksize = self.chunk_size @@ -384,9 +385,8 @@ def upload_from_file(self, file_obj, rewind=False, size=None, 'User-Agent': connection.USER_AGENT, } - upload = transfer.Upload(file_obj, content_type, total_bytes, - auto_transfer=False, - chunksize=self.chunk_size) + upload = Upload(file_obj, content_type, total_bytes, + auto_transfer=False, chunksize=self.chunk_size) url_builder = _UrlBuilder(bucket_name=self.bucket.name, object_name=self.name) @@ -408,7 +408,7 @@ def upload_from_file(self, file_obj, rewind=False, size=None, query_params=query_params) upload.InitializeUpload(request, connection.http) - if upload.strategy == transfer.RESUMABLE_UPLOAD: + if upload.strategy == RESUMABLE_UPLOAD: http_response = upload.StreamInChunks() else: http_response = make_api_request(connection.http, request, diff --git a/gcloud/streaming/transfer.py b/gcloud/streaming/transfer.py index 472f7f9ec171..3af956960182 100644 --- a/gcloud/streaming/transfer.py +++ b/gcloud/streaming/transfer.py @@ -1,5 +1,16 @@ # pylint: skip-file -"""Upload and download support for apitools.""" +"""Upload and download support for apitools. + +:mod:`gcloud.storage.blob` uses: + +- :class:`Download` +- :class:`Upload` +- :const:`RESUMABLE_UPLOAD` + +:mod:`gcloud.storage.test_blob` patches: + +- :const:`_RESUMABLE_UPLOAD_THRESHOLD` +""" import email.generator as email_generator import email.mime.multipart as mime_multipart From 2cf253bfb8445d9fccb9ffe68260dd6082d1e703 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 28 Oct 2015 16:10:53 -0400 Subject: [PATCH 06/17] Remove deleted APIs from 'streaming.transfer.__all__'. --- gcloud/streaming/transfer.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/gcloud/streaming/transfer.py b/gcloud/streaming/transfer.py index 3af956960182..9ce6693a48be 100644 --- a/gcloud/streaming/transfer.py +++ b/gcloud/streaming/transfer.py @@ -45,10 +45,6 @@ 'Upload', 'RESUMABLE_UPLOAD', 'SIMPLE_UPLOAD', - 'DownloadProgressPrinter', - 'DownloadCompletePrinter', - 'UploadProgressPrinter', - 'UploadCompletePrinter', ] _RESUMABLE_UPLOAD_THRESHOLD = 5 << 20 From bf3f70632c0cf81a555be760b94abf7d1f9cb1c3 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 28 Oct 2015 16:12:31 -0400 Subject: [PATCH 07/17] Make 'streaming.transfer.RESUMABLE_UPLOAD_THRESHOLD' public. --- gcloud/storage/test_blob.py | 2 +- gcloud/streaming/test_transfer.py | 4 ++-- gcloud/streaming/transfer.py | 7 ++++--- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/gcloud/storage/test_blob.py b/gcloud/storage/test_blob.py index dd96c15d10e0..e0bfa8b54a04 100644 --- a/gcloud/storage/test_blob.py +++ b/gcloud/storage/test_blob.py @@ -439,7 +439,7 @@ def test_upload_from_file_resumable(self): blob.chunk_size = 5 # Set the threshhold low enough that we force a resumable uploada. 
- with _Monkey(transfer, _RESUMABLE_UPLOAD_THRESHOLD=5): + with _Monkey(transfer, RESUMABLE_UPLOAD_THRESHOLD=5): with _NamedTemporaryFile() as temp: with open(temp.name, 'wb') as file_obj: file_obj.write(DATA) diff --git a/gcloud/streaming/test_transfer.py b/gcloud/streaming/test_transfer.py index 49addf608694..21989dc79e5b 100644 --- a/gcloud/streaming/test_transfer.py +++ b/gcloud/streaming/test_transfer.py @@ -1076,12 +1076,12 @@ def test__SetDefaultUploadStrategy_wo_resumable_path(self): self.assertEqual(upload.strategy, SIMPLE_UPLOAD) def test__SetDefaultUploadStrategy_w_total_size_gt_threshhold(self): - from gcloud.streaming.transfer import _RESUMABLE_UPLOAD_THRESHOLD + from gcloud.streaming.transfer import RESUMABLE_UPLOAD_THRESHOLD from gcloud.streaming.transfer import RESUMABLE_UPLOAD config = _UploadConfig() request = _Request() upload = self._makeOne( - _Stream(), total_size=_RESUMABLE_UPLOAD_THRESHOLD + 1) + _Stream(), total_size=RESUMABLE_UPLOAD_THRESHOLD + 1) upload._SetDefaultUploadStrategy(config, request) self.assertEqual(upload.strategy, RESUMABLE_UPLOAD) diff --git a/gcloud/streaming/transfer.py b/gcloud/streaming/transfer.py index 9ce6693a48be..45f4888cdec2 100644 --- a/gcloud/streaming/transfer.py +++ b/gcloud/streaming/transfer.py @@ -9,7 +9,7 @@ :mod:`gcloud.storage.test_blob` patches: -- :const:`_RESUMABLE_UPLOAD_THRESHOLD` +- :const:`RESUMABLE_UPLOAD_THRESHOLD` """ import email.generator as email_generator @@ -44,10 +44,11 @@ 'Download', 'Upload', 'RESUMABLE_UPLOAD', + 'RESUMABLE_UPLOAD_THRESHOLD', 'SIMPLE_UPLOAD', ] -_RESUMABLE_UPLOAD_THRESHOLD = 5 << 20 +RESUMABLE_UPLOAD_THRESHOLD = 5 << 20 SIMPLE_UPLOAD = 'simple' RESUMABLE_UPLOAD = 'resumable' @@ -570,7 +571,7 @@ def _SetDefaultUploadStrategy(self, upload_config, http_request): return strategy = SIMPLE_UPLOAD if (self.total_size is not None and - self.total_size > _RESUMABLE_UPLOAD_THRESHOLD): + self.total_size > RESUMABLE_UPLOAD_THRESHOLD): strategy = RESUMABLE_UPLOAD if http_request.body and not upload_config.simple_multipart: strategy = RESUMABLE_UPLOAD From 3033ca681b7cf139cf3292c810f862fdf2ebb8e7 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 28 Oct 2015 16:15:30 -0400 Subject: [PATCH 08/17] Adjust visibility / names of 'streaming.http_wrapper' helpers / constants. --- gcloud/streaming/http_wrapper.py | 36 +++++++++++++-------------- gcloud/streaming/test_http_wrapper.py | 24 +++++++++--------- 2 files changed, 30 insertions(+), 30 deletions(-) diff --git a/gcloud/streaming/http_wrapper.py b/gcloud/streaming/http_wrapper.py index 6b2dd7b021d8..c79dcdcbb0bd 100644 --- a/gcloud/streaming/http_wrapper.py +++ b/gcloud/streaming/http_wrapper.py @@ -36,13 +36,13 @@ from gcloud.streaming.util import calculate_wait_for_retry __all__ = [ - 'CheckResponse', 'get_http', 'handle_http_exceptions', 'make_api_request', - 'RebuildHttpConnections', 'Request', 'Response', + 'RESUME_INCOMPLETE', + 'TOO_MANY_REQUESTS', ] @@ -61,9 +61,9 @@ # http_request: A http_wrapper.Request. # exc: Exception being raised. # num_retries: Number of retries consumed; used for exponential backoff. 
-ExceptionRetryArgs = collections.namedtuple( - 'ExceptionRetryArgs', ['http', 'http_request', 'exc', 'num_retries', - 'max_retry_wait']) +_ExceptionRetryArgs = collections.namedtuple( + '_ExceptionRetryArgs', ['http', 'http_request', 'exc', 'num_retries', + 'max_retry_wait']) @contextlib.contextmanager @@ -173,7 +173,7 @@ def length(self): Returns: Response length (as int or long) """ - def ProcessContentRange(content_range): + def _process_content_range(content_range): _, _, range_spec = content_range.partition(' ') byte_range, _, _ = range_spec.partition('/') start, _, end = byte_range.partition('-') @@ -183,11 +183,11 @@ def ProcessContentRange(content_range): # httplib2 rewrites content-length in the case of a compressed # transfer; we can't trust the content-length header in that # case, but we *can* trust content-range, if it's present. - return ProcessContentRange(self.info['content-range']) + return _process_content_range(self.info['content-range']) elif 'content-length' in self.info: return int(self.info.get('content-length')) elif 'content-range' in self.info: - return ProcessContentRange(self.info['content-range']) + return _process_content_range(self.info['content-range']) return len(self.content) @property @@ -205,7 +205,7 @@ def is_redirect(self): 'location' in self.info) -def CheckResponse(response): +def _check_response(response): if response is None: # Caller shouldn't call us if the response is None, but handle anyway. raise RequestError( @@ -217,7 +217,7 @@ def CheckResponse(response): raise RetryAfterError.FromResponse(response) -def RebuildHttpConnections(http): +def _rebuild_http_connections(http): """Rebuilds all http connections in the httplib2.Http instance. httplib2 overloads the map in http.connections to contain two different @@ -242,7 +242,7 @@ def handle_http_exceptions(retry_args): This catches known failures and rebuilds the underlying HTTP connections. Args: - retry_args: An ExceptionRetryArgs tuple. + retry_args: An _ExceptionRetryArgs tuple. """ # If the server indicates how long to wait, use that value. Otherwise, # calculate the wait time on our own. @@ -282,7 +282,7 @@ def handle_http_exceptions(retry_args): retry_after = retry_args.exc.retry_after else: raise - RebuildHttpConnections(retry_args.http) + _rebuild_http_connections(retry_args.http) logging.debug('Retrying request to url %s after exception %s', retry_args.http_request.url, retry_args.exc) time.sleep( @@ -290,8 +290,8 @@ def handle_http_exceptions(retry_args): retry_args.num_retries, max_wait=retry_args.max_retry_wait)) -def _MakeRequestNoRetry(http, http_request, redirections=5, - check_response_func=CheckResponse): +def _make_api_request_no_retry(http, http_request, redirections=5, + check_response_func=_check_response): """Send http_request via the given http. This wrapper exists to handle translation between the plain httplib2 @@ -340,8 +340,8 @@ def _MakeRequestNoRetry(http, http_request, redirections=5, def make_api_request(http, http_request, retries=7, max_retry_wait=60, redirections=5, retry_func=handle_http_exceptions, - check_response_func=CheckResponse, - wo_retry_func=_MakeRequestNoRetry): + check_response_func=_check_response, + wo_retry_func=_make_api_request_no_retry): """Send http_request via the given http, performing error/retry handling. 
Args: @@ -380,14 +380,14 @@ def make_api_request(http, http_request, retries=7, max_retry_wait=60, if retry >= retries: raise else: - retry_func(ExceptionRetryArgs( + retry_func(_ExceptionRetryArgs( http, http_request, e, retry, max_retry_wait)) _HTTP_FACTORIES = [] -def _RegisterHttpFactory(factory): +def _register_http_factory(factory): _HTTP_FACTORIES.append(factory) diff --git a/gcloud/streaming/test_http_wrapper.py b/gcloud/streaming/test_http_wrapper.py index 0bf493ea90fb..182dc48d06c4 100644 --- a/gcloud/streaming/test_http_wrapper.py +++ b/gcloud/streaming/test_http_wrapper.py @@ -200,11 +200,11 @@ def test_is_redirect_w_code_w_location(self): self.assertTrue(response.is_redirect) -class Test_CheckResponse(unittest2.TestCase): +class Test__check_response(unittest2.TestCase): def _callFUT(self, *args, **kw): - from gcloud.streaming.http_wrapper import CheckResponse - return CheckResponse(*args, **kw) + from gcloud.streaming.http_wrapper import _check_response + return _check_response(*args, **kw) def test_w_none(self): from gcloud.streaming.exceptions import RequestError @@ -237,11 +237,11 @@ def test_pass(self): self._callFUT(_Response(200)) -class Test_RebuildHttpConnections(unittest2.TestCase): +class Test__rebuild_http_connections(unittest2.TestCase): def _callFUT(self, *args, **kw): - from gcloud.streaming.http_wrapper import RebuildHttpConnections - return RebuildHttpConnections(*args, **kw) + from gcloud.streaming.http_wrapper import _rebuild_http_connections + return _rebuild_http_connections(*args, **kw) def test_wo_connections(self): http = object() @@ -497,11 +497,11 @@ def _raises(): self._callFUT(retry_args) -class Test__MakeRequestNoRetry(unittest2.TestCase): +class Test___make_api_request_no_retry(unittest2.TestCase): def _callFUT(self, *args, **kw): - from gcloud.streaming.http_wrapper import _MakeRequestNoRetry - return _MakeRequestNoRetry(*args, **kw) + from gcloud.streaming.http_wrapper import _make_api_request_no_retry + return _make_api_request_no_retry(*args, **kw) def _verify_requested(self, http, request, redirections=5, connection_type=None): @@ -721,11 +721,11 @@ def _retry(args): self.assertEqual(retry.max_retry_wait, WAIT) -class Test__RegisterHttpFactory(unittest2.TestCase): +class Test__register_http_factory(unittest2.TestCase): def _callFUT(self, *args, **kw): - from gcloud.streaming.http_wrapper import _RegisterHttpFactory - return _RegisterHttpFactory(*args, **kw) + from gcloud.streaming.http_wrapper import _register_http_factory + return _register_http_factory(*args, **kw) def test_it(self): from gcloud._testing import _Monkey From 54cb870c82b26b8526f5b9f8fb8c7cf858fed1ab Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 28 Oct 2015 16:28:22 -0400 Subject: [PATCH 09/17] Strip out unused/redundant exception types. 
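
Callers that previously caught 'GeneratedClientError' around MIME-type validation now need to catch 'ConfigurationValueError' instead. A minimal sketch of an adjusted caller (illustrative only, not part of this patch; the example patterns are made up):

    from gcloud.streaming.exceptions import ConfigurationValueError
    from gcloud.streaming.util import acceptable_mime_type

    try:
        # Patterns carrying parameters (e.g. ';charset=utf-8') are rejected.
        acceptable_mime_type(['text/*;charset=utf-8'], 'text/plain')
    except ConfigurationValueError:
        pass  # formerly raised as GeneratedClientError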
--- gcloud/streaming/exceptions.py | 34 ++------------------------------ gcloud/streaming/http_wrapper.py | 3 --- gcloud/streaming/test_util.py | 4 ++-- gcloud/streaming/util.py | 4 ++-- 4 files changed, 6 insertions(+), 39 deletions(-) diff --git a/gcloud/streaming/exceptions.py b/gcloud/streaming/exceptions.py index a9055dab82a1..97a3adf8ebe0 100644 --- a/gcloud/streaming/exceptions.py +++ b/gcloud/streaming/exceptions.py @@ -21,7 +21,7 @@ :mod:`gcloud._apidools.util` uses: -- :exc:`GeneratedClientError` +- :exc:`ConfigurationValueError` - :exc:`InvalidUserInputError` - :exc:`TypecheckError` """ @@ -89,41 +89,11 @@ class InvalidUserInputError(InvalidDataError): """User-provided input is invalid.""" -class InvalidDataFromServerError(InvalidDataError, CommunicationError): - - """Data received from the server is malformed.""" - - -class BatchError(Error): - - """Error generated while constructing a batch request.""" - - -class ConfigurationError(Error): - - """Base class for configuration errors.""" - - -class GeneratedClientError(Error): - - """The generated client configuration is invalid.""" - - class ConfigurationValueError(UserError): """Some part of the user-specified client configuration is invalid.""" -class ResourceUnavailableError(Error): - - """User requested an unavailable resource.""" - - -class CredentialsError(Error): - - """Errors related to invalid credentials.""" - - class TransferError(CommunicationError): """Errors related to transfers.""" @@ -163,7 +133,7 @@ class BadStatusCodeError(HttpError): """The request completed but returned a bad status code.""" -class NotYetImplementedError(GeneratedClientError): +class NotYetImplementedError(Error): """This functionality is not yet implemented.""" diff --git a/gcloud/streaming/http_wrapper.py b/gcloud/streaming/http_wrapper.py index c79dcdcbb0bd..981809eb08de 100644 --- a/gcloud/streaming/http_wrapper.py +++ b/gcloud/streaming/http_wrapper.py @@ -360,9 +360,6 @@ def make_api_request(http, http_request, retries=7, max_retry_wait=60, wo_retry_func: Function to make HTTP request without retries. Arguments are: (http, http_request, redirections, check_response_func) - Raises: - InvalidDataFromServerError: if there is no response after retries. - Returns: A Response object. 
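
With the 'InvalidDataFromServerError' clause gone, 'make_api_request' either returns a Response or lets the underlying exception propagate once retries are exhausted. For reference, a minimal sketch of calling it directly (illustrative only, not part of this patch; the URL and headers are made up):

    from six.moves import http_client

    from gcloud.streaming.http_wrapper import Request
    from gcloud.streaming.http_wrapper import get_http
    from gcloud.streaming.http_wrapper import make_api_request

    http = get_http()
    request = Request('http://example.com/api', 'GET',
                      {'accept': 'application/json'})
    response = make_api_request(http, request, retries=7)
    if response.status_code == http_client.OK:
        payload = response.content  # body; size reported by response.length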
diff --git a/gcloud/streaming/test_util.py b/gcloud/streaming/test_util.py index ec3bc0e53de5..0922334d29ed 100644 --- a/gcloud/streaming/test_util.py +++ b/gcloud/streaming/test_util.py @@ -62,8 +62,8 @@ def test_pattern_wo_slash(self): ('Invalid MIME type: "BOGUS"',)) def test_accept_pattern_w_semicolon(self): - from gcloud.streaming.exceptions import GeneratedClientError - with self.assertRaises(GeneratedClientError) as err: + from gcloud.streaming.exceptions import ConfigurationValueError + with self.assertRaises(ConfigurationValueError) as err: self._callFUT(['text/*;charset=utf-8'], 'text/plain') self.assertEqual( err.exception.args, diff --git a/gcloud/streaming/util.py b/gcloud/streaming/util.py index 9ab423aa5cbf..ecdd44170bea 100644 --- a/gcloud/streaming/util.py +++ b/gcloud/streaming/util.py @@ -15,7 +15,7 @@ import random -from gcloud.streaming.exceptions import GeneratedClientError +from gcloud.streaming.exceptions import ConfigurationValueError from gcloud.streaming.exceptions import InvalidUserInputError from gcloud.streaming.exceptions import TypecheckError @@ -74,7 +74,7 @@ def acceptable_mime_type(accept_patterns, mime_type): 'Invalid MIME type: "%s"' % mime_type) unsupported_patterns = [p for p in accept_patterns if ';' in p] if unsupported_patterns: - raise GeneratedClientError( + raise ConfigurationValueError( 'MIME patterns with parameter unsupported: "%s"' % ', '.join( unsupported_patterns)) From e22993e05c775988e7b633fe7f8e99bfef97a52f Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 28 Oct 2015 17:33:18 -0400 Subject: [PATCH 10/17] Give 'streaming.transfer' methods PEP8 names. --- gcloud/storage/blob.py | 16 +- gcloud/streaming/test_transfer.py | 694 ++++++++++++------------------ gcloud/streaming/transfer.py | 204 ++++----- 3 files changed, 378 insertions(+), 536 deletions(-) diff --git a/gcloud/storage/blob.py b/gcloud/storage/blob.py index 2b9abf4312e7..fb7357ccd1c2 100644 --- a/gcloud/storage/blob.py +++ b/gcloud/storage/blob.py @@ -260,7 +260,7 @@ def download_to_file(self, file_obj, client=None): download_url = self.media_link # Use apitools 'Download' facility. - download = Download.FromStream(file_obj, auto_transfer=False) + download = Download.from_stream(file_obj, auto_transfer=False) headers = {} if self.chunk_size is not None: download.chunksize = self.chunk_size @@ -273,9 +273,9 @@ def download_to_file(self, file_obj, client=None): # object. The rest (API_BASE_URL and build_api_url) are also defined # on the Batch class, but we just use the wrapped connection since # it has all three (http, API_BASE_URL and build_api_url). - download.InitializeDownload(request, client._connection.http) + download.initialize_download(request, client._connection.http) - download.StreamInChunks() + download.stream_file(use_chunks=True) def download_to_filename(self, filename, client=None): """Download the contents of this blob into a named file. @@ -400,16 +400,16 @@ def upload_from_file(self, file_obj, rewind=False, size=None, # Use apitools 'Upload' facility. 
request = Request(upload_url, 'POST', headers) - upload.ConfigureRequest(upload_config, request, url_builder) + upload.configure_request(upload_config, request, url_builder) query_params = url_builder.query_params base_url = connection.API_BASE_URL + '/upload' request.url = connection.build_api_url(api_base_url=base_url, path=self.bucket.path + '/o', query_params=query_params) - upload.InitializeUpload(request, connection.http) + upload.initialize_upload(request, connection.http) if upload.strategy == RESUMABLE_UPLOAD: - http_response = upload.StreamInChunks() + http_response = upload.stream_file(use_chunks=True) else: http_response = make_api_request(connection.http, request, retries=num_retries) @@ -766,7 +766,7 @@ def updated(self): class _UploadConfig(object): - """Faux message FBO apitools' 'ConfigureRequest'. + """Faux message FBO apitools' 'configure_request'. Values extracted from apitools 'samples/storage_sample/storage/storage_v1_client.py' @@ -780,7 +780,7 @@ class _UploadConfig(object): class _UrlBuilder(object): - """Faux builder FBO apitools' 'ConfigureRequest'""" + """Faux builder FBO apitools' 'configure_request'""" def __init__(self, bucket_name, object_name): self.query_params = {'name': object_name} self._bucket_name = bucket_name diff --git a/gcloud/streaming/test_transfer.py b/gcloud/streaming/test_transfer.py index 21989dc79e5b..8ab971e603b3 100644 --- a/gcloud/streaming/test_transfer.py +++ b/gcloud/streaming/test_transfer.py @@ -24,7 +24,6 @@ def test_ctor_defaults(self): self.assertEqual(xfer.num_retries, 5) self.assertTrue(xfer.url is None) self.assertFalse(xfer.initialized) - self.assertEqual(xfer._type_name, '_Transfer') def test_ctor_explicit(self): stream = _Stream() @@ -73,70 +72,70 @@ def test_num_retries_setter_negative(self): with self.assertRaises(InvalidDataError): xfer.num_retries = -1 - def test__Initialize_not_already_initialized_w_http(self): + def test__initialize_not_already_initialized_w_http(self): HTTP = object() stream = _Stream() xfer = self._makeOne(stream) - xfer._Initialize(HTTP, self.URL) + xfer._initialize(HTTP, self.URL) self.assertTrue(xfer.initialized) self.assertTrue(xfer.http is HTTP) self.assertTrue(xfer.url is self.URL) - def test__Initialize_not_already_initialized_wo_http(self): + def test__initialize_not_already_initialized_wo_http(self): from httplib2 import Http stream = _Stream() xfer = self._makeOne(stream) - xfer._Initialize(None, self.URL) + xfer._initialize(None, self.URL) self.assertTrue(xfer.initialized) self.assertTrue(isinstance(xfer.http, Http)) self.assertTrue(xfer.url is self.URL) - def test__Initialize_w_existing_http(self): + def test__initialize_w_existing_http(self): HTTP_1, HTTP_2 = object(), object() stream = _Stream() xfer = self._makeOne(stream, http=HTTP_1) - xfer._Initialize(HTTP_2, self.URL) + xfer._initialize(HTTP_2, self.URL) self.assertTrue(xfer.initialized) self.assertTrue(xfer.http is HTTP_1) self.assertTrue(xfer.url is self.URL) - def test__Initialize_already_initialized(self): + def test__initialize_already_initialized(self): from gcloud.streaming.exceptions import TransferInvalidError URL_2 = 'http://example.com/other' HTTP_1, HTTP_2 = object(), object() stream = _Stream() xfer = self._makeOne(stream) - xfer._Initialize(HTTP_1, self.URL) + xfer._initialize(HTTP_1, self.URL) with self.assertRaises(TransferInvalidError): - xfer._Initialize(HTTP_2, URL_2) + xfer._initialize(HTTP_2, URL_2) - def test_EnsureInitialized_hit(self): + def test__ensure_initialized_hit(self): HTTP = object() stream = 
_Stream() xfer = self._makeOne(stream) - xfer._Initialize(HTTP, self.URL) - xfer.EnsureInitialized() # no raise + xfer._initialize(HTTP, self.URL) + xfer._ensure_initialized() # no raise - def test_EnsureInitialized_miss(self): + def test__ensure_initialized_miss(self): from gcloud.streaming.exceptions import TransferInvalidError stream = _Stream() xfer = self._makeOne(stream) with self.assertRaises(TransferInvalidError): - xfer.EnsureInitialized() + xfer._ensure_initialized() - def test_EnsureUninitialized_hit(self): + def test__ensure_uninitialized_hit(self): stream = _Stream() xfer = self._makeOne(stream) - xfer.EnsureUninitialized() # no raise + xfer._ensure_uninitialized() # no raise - def test_EnsureUninitialized_miss(self): + def test__ensure_uninitialized_miss(self): from gcloud.streaming.exceptions import TransferInvalidError stream = _Stream() HTTP = object() xfer = self._makeOne(stream) - xfer._Initialize(HTTP, self.URL) + xfer._initialize(HTTP, self.URL) with self.assertRaises(TransferInvalidError): - xfer.EnsureUninitialized() + xfer._ensure_uninitialized() def test___del___closes_stream(self): @@ -181,7 +180,7 @@ def test_ctor_w_total_size(self): self.assertTrue(download.stream is stream) self.assertEqual(download.total_size, SIZE) - def test_FromFile_w_existing_file_no_override(self): + def test_from_file_w_existing_file_no_override(self): import os from gcloud.streaming.exceptions import InvalidUserInputError klass = self._getTargetClass() @@ -190,111 +189,111 @@ def test_FromFile_w_existing_file_no_override(self): with open(filename, 'w') as fileobj: fileobj.write('EXISTING FILE') with self.assertRaises(InvalidUserInputError): - klass.FromFile(filename) + klass.from_file(filename) - def test_FromFile_w_existing_file_w_override_wo_auto_transfer(self): + def test_from_file_w_existing_file_w_override_wo_auto_transfer(self): import os klass = self._getTargetClass() with _tempdir() as tempdir: filename = os.path.join(tempdir, 'file.out') with open(filename, 'w') as fileobj: fileobj.write('EXISTING FILE') - download = klass.FromFile(filename, overwrite=True, + download = klass.from_file(filename, overwrite=True, auto_transfer=False) self.assertFalse(download.auto_transfer) del download # closes stream with open(filename, 'rb') as fileobj: self.assertEqual(fileobj.read(), b'') - def test_FromStream_defaults(self): + def test_from_stream_defaults(self): stream = _Stream() klass = self._getTargetClass() - download = klass.FromStream(stream) + download = klass.from_stream(stream) self.assertTrue(download.stream is stream) self.assertTrue(download.auto_transfer) self.assertTrue(download.total_size is None) - def test_FromStream_explicit(self): + def test_from_stream_explicit(self): CHUNK_SIZE = 1 << 18 SIZE = 123 stream = _Stream() klass = self._getTargetClass() - download = klass.FromStream(stream, auto_transfer=False, + download = klass.from_stream(stream, auto_transfer=False, total_size=SIZE, chunksize=CHUNK_SIZE) self.assertTrue(download.stream is stream) self.assertFalse(download.auto_transfer) self.assertEqual(download.total_size, SIZE) self.assertEqual(download.chunksize, CHUNK_SIZE) - def test_ConfigureRequest(self): + def test_configure_request(self): CHUNK_SIZE = 100 download = self._makeOne(_Stream(), chunksize=CHUNK_SIZE) request = _Dummy(headers={}) url_builder = _Dummy(query_params={}) - download.ConfigureRequest(request, url_builder) + download.configure_request(request, url_builder) self.assertEqual(request.headers, {'Range': 'bytes=0-99'}) 
self.assertEqual(url_builder.query_params, {'alt': 'media'}) - def test__SetTotal_wo_content_range_wo_existing_total(self): + def test__set_total_wo_content_range_wo_existing_total(self): info = {} download = self._makeOne(_Stream()) - download._SetTotal(info) + download._set_total(info) self.assertEqual(download.total_size, 0) - def test__SetTotal_wo_content_range_w_existing_total(self): + def test__set_total_wo_content_range_w_existing_total(self): SIZE = 123 info = {} download = self._makeOne(_Stream(), total_size=SIZE) - download._SetTotal(info) + download._set_total(info) self.assertEqual(download.total_size, SIZE) - def test__SetTotal_w_content_range_w_existing_total(self): + def test__set_total_w_content_range_w_existing_total(self): SIZE = 123 info = {'content-range': 'bytes 123-234/4567'} download = self._makeOne(_Stream(), total_size=SIZE) - download._SetTotal(info) + download._set_total(info) self.assertEqual(download.total_size, 4567) - def test__SetTotal_w_content_range_w_asterisk_total(self): + def test__set_total_w_content_range_w_asterisk_total(self): info = {'content-range': 'bytes 123-234/*'} download = self._makeOne(_Stream()) - download._SetTotal(info) + download._set_total(info) self.assertEqual(download.total_size, 0) - def test_InitializeDownload_already_initialized(self): + def test_initialize_download_already_initialized(self): from gcloud.streaming.exceptions import TransferInvalidError request = _Request() download = self._makeOne(_Stream()) - download._Initialize(None, self.URL) + download._initialize(None, self.URL) with self.assertRaises(TransferInvalidError): - download.InitializeDownload(request, http=object()) + download.initialize_download(request, http=object()) - def test_InitializeDownload_wo_http_or_client(self): + def test_initialize_download_wo_http_or_client(self): from gcloud.streaming.exceptions import UserError request = _Request() download = self._makeOne(_Stream()) with self.assertRaises(UserError): - download.InitializeDownload(request) + download.initialize_download(request) - def test_InitializeDownload_wo_client_wo_autotransfer(self): + def test_initialize_download_wo_client_wo_autotransfer(self): request = _Request() http = object() download = self._makeOne(_Stream(), auto_transfer=False) - download.InitializeDownload(request, http) + download.initialize_download(request, http) self.assertTrue(download.http is http) self.assertEqual(download.url, request.url) - def test_InitializeDownload_w_client_wo_autotransfer(self): + def test_initialize_download_w_client_wo_autotransfer(self): FINALIZED_URL = 'http://example.com/other' request = _Request() http = object() client = _Client(http, FINALIZED_URL) download = self._makeOne(_Stream(), auto_transfer=False) - download.InitializeDownload(request, client=client) + download.initialize_download(request, client=client) self.assertTrue(download.http is http) self.assertEqual(download.url, FINALIZED_URL) - def test_InitializeDownload_w_autotransfer_failing(self): + def test_initialize_download_w_autotransfer_failing(self): from six.moves import http_client from gcloud._testing import _Monkey from gcloud.streaming import transfer as MUT @@ -309,12 +308,12 @@ def test_InitializeDownload_w_autotransfer_failing(self): with _Monkey(MUT, make_api_request=requester): with self.assertRaises(HttpError): - download.InitializeDownload(request, http) + download.initialize_download(request, http) self.assertTrue(len(requester._requested), 1) self.assertTrue(requester._requested[0][0] is request) - def 
test_InitializeDownload_w_autotransfer_w_content_location(self): + def test_initialize_download_w_autotransfer_w_content_location(self): from six.moves import http_client from gcloud._testing import _Monkey from gcloud.streaming import transfer as MUT @@ -328,7 +327,7 @@ def test_InitializeDownload_w_autotransfer_w_content_location(self): requester = _MakeRequest(response) with _Monkey(MUT, make_api_request=requester): - download.InitializeDownload(request, http) + download.initialize_download(request, http) self.assertTrue(download._initial_response is None) self.assertEqual(download.total_size, 0) @@ -337,230 +336,207 @@ def test_InitializeDownload_w_autotransfer_w_content_location(self): self.assertTrue(len(requester._requested), 1) self.assertTrue(requester._requested[0][0] is request) - def test__NormalizeStartEnd_w_end_w_start_lt_0(self): + def test__normalize_start_end_w_end_w_start_lt_0(self): from gcloud.streaming.exceptions import TransferInvalidError request = _Request() download = self._makeOne(_Stream()) with self.assertRaises(TransferInvalidError): - download._NormalizeStartEnd(-1, 0) + download._normalize_start_end(-1, 0) - def test__NormalizeStartEnd_w_end_w_start_gt_total(self): + def test__normalize_start_end_w_end_w_start_gt_total(self): from gcloud.streaming.exceptions import TransferInvalidError request = _Request() download = self._makeOne(_Stream()) - download._SetTotal({'content-range': 'bytes 0-1/2'}) + download._set_total({'content-range': 'bytes 0-1/2'}) with self.assertRaises(TransferInvalidError): - download._NormalizeStartEnd(3, 0) + download._normalize_start_end(3, 0) - def test__NormalizeStartEnd_w_end_lt_start(self): + def test__normalize_start_end_w_end_lt_start(self): from gcloud.streaming.exceptions import TransferInvalidError request = _Request() download = self._makeOne(_Stream()) - download._SetTotal({'content-range': 'bytes 0-1/2'}) + download._set_total({'content-range': 'bytes 0-1/2'}) with self.assertRaises(TransferInvalidError): - download._NormalizeStartEnd(1, 0) + download._normalize_start_end(1, 0) - def test__NormalizeStartEnd_w_end_gt_start(self): + def test__normalize_start_end_w_end_gt_start(self): request = _Request() download = self._makeOne(_Stream()) - download._SetTotal({'content-range': 'bytes 0-1/2'}) - self.assertEqual(download._NormalizeStartEnd(1, 2), (1, 1)) + download._set_total({'content-range': 'bytes 0-1/2'}) + self.assertEqual(download._normalize_start_end(1, 2), (1, 1)) - def test__NormalizeStartEnd_wo_end_w_start_lt_0(self): + def test__normalize_start_end_wo_end_w_start_lt_0(self): request = _Request() download = self._makeOne(_Stream()) - download._SetTotal({'content-range': 'bytes 0-1/2'}) - self.assertEqual(download._NormalizeStartEnd(-2), (0, 1)) - self.assertEqual(download._NormalizeStartEnd(-1), (1, 1)) + download._set_total({'content-range': 'bytes 0-1/2'}) + self.assertEqual(download._normalize_start_end(-2), (0, 1)) + self.assertEqual(download._normalize_start_end(-1), (1, 1)) - def test__NormalizeStartEnd_wo_end_w_start_ge_0(self): + def test__normalize_start_end_wo_end_w_start_ge_0(self): request = _Request() download = self._makeOne(_Stream()) - download._SetTotal({'content-range': 'bytes 0-1/100'}) - self.assertEqual(download._NormalizeStartEnd(0), (0, 99)) - self.assertEqual(download._NormalizeStartEnd(1), (1, 99)) + download._set_total({'content-range': 'bytes 0-1/100'}) + self.assertEqual(download._normalize_start_end(0), (0, 99)) + self.assertEqual(download._normalize_start_end(1), (1, 99)) - def 
test__SetRangeHeader_w_start_lt_0(self): + def test__set_range_header_w_start_lt_0(self): request = _Request() download = self._makeOne(_Stream()) - download._SetRangeHeader(request, -1) + download._set_range_header(request, -1) self.assertEqual(request.headers['range'], 'bytes=-1') - def test__SetRangeHeader_w_start_ge_0_wo_end(self): + def test__set_range_header_w_start_ge_0_wo_end(self): request = _Request() download = self._makeOne(_Stream()) - download._SetRangeHeader(request, 0) + download._set_range_header(request, 0) self.assertEqual(request.headers['range'], 'bytes=0-') - def test__SetRangeHeader_w_start_ge_0_w_end(self): + def test__set_range_header_w_start_ge_0_w_end(self): request = _Request() download = self._makeOne(_Stream()) - download._SetRangeHeader(request, 0, 1) + download._set_range_header(request, 0, 1) self.assertEqual(request.headers['range'], 'bytes=0-1') - def test__ComputeEndByte_w_start_lt_0_w_end(self): + def test__compute_end_byte_w_start_lt_0_w_end(self): download = self._makeOne(_Stream()) - self.assertEqual(download._ComputeEndByte(-1, 1), 1) + self.assertEqual(download._compute_end_byte(-1, 1), 1) - def test__ComputeEndByte_w_start_ge_0_wo_end_w_use_chunks(self): + def test__compute_end_byte_w_start_ge_0_wo_end_w_use_chunks(self): CHUNK_SIZE = 5 download = self._makeOne(_Stream(), chunksize=CHUNK_SIZE) - self.assertEqual(download._ComputeEndByte(0, use_chunks=True), 4) + self.assertEqual(download._compute_end_byte(0, use_chunks=True), 4) - def test__ComputeEndByte_w_start_ge_0_w_end_w_use_chunks(self): + def test__compute_end_byte_w_start_ge_0_w_end_w_use_chunks(self): CHUNK_SIZE = 5 download = self._makeOne(_Stream(), chunksize=CHUNK_SIZE) - self.assertEqual(download._ComputeEndByte(0, 3, use_chunks=True), 3) - self.assertEqual(download._ComputeEndByte(0, 5, use_chunks=True), 4) + self.assertEqual(download._compute_end_byte(0, 3, use_chunks=True), 3) + self.assertEqual(download._compute_end_byte(0, 5, use_chunks=True), 4) - def test__ComputeEndByte_w_start_ge_0_w_end_w_total_size(self): + def test__compute_end_byte_w_start_ge_0_w_end_w_total_size(self): CHUNK_SIZE = 50 download = self._makeOne(_Stream(), chunksize=CHUNK_SIZE) - download._SetTotal({'content-range': 'bytes 0-1/10'}) - self.assertEqual(download._ComputeEndByte(0, 100, use_chunks=False), 9) - self.assertEqual(download._ComputeEndByte(0, 8, use_chunks=False), 8) + download._set_total({'content-range': 'bytes 0-1/10'}) + self.assertEqual(download._compute_end_byte(0, 100, use_chunks=False), 9) + self.assertEqual(download._compute_end_byte(0, 8, use_chunks=False), 8) - def test__ComputeEndByte_w_start_ge_0_wo_end_w_total_size(self): + def test__compute_end_byte_w_start_ge_0_wo_end_w_total_size(self): CHUNK_SIZE = 50 download = self._makeOne(_Stream(), chunksize=CHUNK_SIZE) - download._SetTotal({'content-range': 'bytes 0-1/10'}) - self.assertEqual(download._ComputeEndByte(0, use_chunks=False), 9) + download._set_total({'content-range': 'bytes 0-1/10'}) + self.assertEqual(download._compute_end_byte(0, use_chunks=False), 9) - def test__GetChunk_not_initialized(self): + def test__get_chunk_not_initialized(self): from gcloud.streaming.exceptions import TransferInvalidError request = _Request() http = object() download = self._makeOne(_Stream()) with self.assertRaises(TransferInvalidError): - found = download._GetChunk(0, 10) + found = download._get_chunk(0, 10) - def test__GetChunk_wo_additional_headers(self): + def test__get_chunk(self): from six.moves import http_client from gcloud._testing import 
_Monkey from gcloud.streaming import transfer as MUT request = _Request() http = object() download = self._makeOne(_Stream()) - download._Initialize(http, request.URL) + download._initialize(http, request.URL) response = _makeResponse(http_client.OK) requester = _MakeRequest(response) with _Monkey(MUT, Request=lambda url: request, make_api_request=requester): - found = download._GetChunk(0, 10) + found = download._get_chunk(0, 10) self.assertTrue(found is response) self.assertTrue(len(requester._requested), 1) self.assertTrue(requester._requested[0][0] is request) self.assertEqual(request.headers['range'], 'bytes=0-10') - def test__GetChunk_w_additional_headers(self): - from six.moves import http_client - from gcloud._testing import _Monkey - from gcloud.streaming import transfer as MUT - request = _Request() - http = object() - headers = {'foo': 'bar'} - download = self._makeOne(_Stream()) - download._Initialize(http, request.URL) - response = _makeResponse(http_client.OK) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=lambda url: request, - make_api_request=requester): - found = download._GetChunk(0, 10, additional_headers=headers) - - self.assertTrue(found is response) - self.assertTrue(len(requester._requested), 1) - self.assertTrue(requester._requested[0][0] is request) - self.assertEqual(request.headers['range'], 'bytes=0-10') - self.assertEqual(request.headers['foo'], 'bar') - - def test__ProcessResponse_w_FORBIDDEN(self): + def test__process_response_w_FORBIDDEN(self): from gcloud.streaming.exceptions import HttpError from six.moves import http_client download = self._makeOne(_Stream()) response = _makeResponse(http_client.FORBIDDEN) with self.assertRaises(HttpError): - download._ProcessResponse(response) + download._process_response(response) - def test__ProcessResponse_w_NOT_FOUND(self): + def test__process_response_w_NOT_FOUND(self): from gcloud.streaming.exceptions import HttpError from six.moves import http_client download = self._makeOne(_Stream()) response = _makeResponse(http_client.NOT_FOUND) with self.assertRaises(HttpError): - download._ProcessResponse(response) + download._process_response(response) - def test__ProcessResponse_w_other_error(self): + def test__process_response_w_other_error(self): from gcloud.streaming.exceptions import TransferRetryError from six.moves import http_client download = self._makeOne(_Stream()) response = _makeResponse(http_client.BAD_REQUEST) with self.assertRaises(TransferRetryError): - download._ProcessResponse(response) + download._process_response(response) - def test__ProcessResponse_w_OK_wo_encoding(self): + def test__process_response_w_OK_wo_encoding(self): from six.moves import http_client stream = _Stream() download = self._makeOne(stream) response = _makeResponse(http_client.OK, content='OK') - found = download._ProcessResponse(response) + found = download._process_response(response) self.assertTrue(found is response) self.assertEqual(stream._written, ['OK']) self.assertEqual(download.progress, 2) self.assertEqual(download.encoding, None) - def test__ProcessResponse_w_PARTIAL_CONTENT_w_encoding(self): + def test__process_response_w_PARTIAL_CONTENT_w_encoding(self): from six.moves import http_client stream = _Stream() download = self._makeOne(stream) info = {'content-encoding': 'blah'} response = _makeResponse(http_client.OK, info, 'PARTIAL') - found = download._ProcessResponse(response) + found = download._process_response(response) self.assertTrue(found is response) self.assertEqual(stream._written, 
['PARTIAL']) self.assertEqual(download.progress, 7) self.assertEqual(download.encoding, 'blah') - def test__ProcessResponse_w_REQUESTED_RANGE_NOT_SATISFIABLE(self): + def test__process_response_w_REQUESTED_RANGE_NOT_SATISFIABLE(self): from six.moves import http_client stream = _Stream() download = self._makeOne(stream) response = _makeResponse( http_client.REQUESTED_RANGE_NOT_SATISFIABLE) - found = download._ProcessResponse(response) + found = download._process_response(response) self.assertTrue(found is response) self.assertEqual(stream._written, []) self.assertEqual(download.progress, 0) self.assertEqual(download.encoding, None) - def test__ProcessResponse_w_NO_CONTENT(self): + def test__process_response_w_NO_CONTENT(self): from six.moves import http_client stream = _Stream() download = self._makeOne(stream) response = _makeResponse(status_code=http_client.NO_CONTENT) - found = download._ProcessResponse(response) + found = download._process_response(response) self.assertTrue(found is response) self.assertEqual(stream._written, ['']) self.assertEqual(download.progress, 0) self.assertEqual(download.encoding, None) - def test_GetRange_not_initialized(self): + def test_get_range_not_initialized(self): from gcloud.streaming.exceptions import TransferInvalidError request = _Request() http = object() download = self._makeOne(_Stream()) with self.assertRaises(TransferInvalidError): - found = download.GetRange(0, 10) + found = download.get_range(0, 10) - def test_GetRange_wo_total_size_complete(self): + def test_get_range_wo_total_size_complete(self): from six.moves import http_client from gcloud._testing import _Monkey from gcloud.streaming import transfer as MUT @@ -572,7 +548,7 @@ def test_GetRange_wo_total_size_complete(self): http = object() stream = _Stream() download = self._makeOne(stream) - download._Initialize(http, request.URL) + download._initialize(http, request.URL) info = {'content-range': RESP_RANGE} response = _makeResponse(http_client.OK, info, CONTENT) requester = _MakeRequest(response) @@ -580,7 +556,7 @@ def test_GetRange_wo_total_size_complete(self): with _Monkey(MUT, Request=lambda url: request, make_api_request=requester): - download.GetRange(0, LEN) + download.get_range(0, LEN) self.assertTrue(len(requester._requested), 1) self.assertTrue(requester._requested[0][0] is request) @@ -588,7 +564,7 @@ def test_GetRange_wo_total_size_complete(self): self.assertEqual(stream._written, [CONTENT]) self.assertEqual(download.total_size, LEN) - def test_GetRange_wo_total_size_wo_end(self): + def test_get_range_wo_total_size_wo_end(self): from six.moves import http_client from gcloud._testing import _Monkey from gcloud.streaming import transfer as MUT @@ -602,7 +578,7 @@ def test_GetRange_wo_total_size_wo_end(self): http = object() stream = _Stream() download = self._makeOne(stream, chunksize=CHUNK_SIZE) - download._Initialize(http, request.URL) + download._initialize(http, request.URL) info = {'content-range': RESP_RANGE} response = _makeResponse(http_client.OK, info, CONTENT[START:]) requester = _MakeRequest(response) @@ -610,7 +586,7 @@ def test_GetRange_wo_total_size_wo_end(self): with _Monkey(MUT, Request=lambda url: request, make_api_request=requester): - download.GetRange(START) + download.get_range(START) self.assertTrue(len(requester._requested), 1) self.assertTrue(requester._requested[0][0] is request) @@ -618,7 +594,7 @@ def test_GetRange_wo_total_size_wo_end(self): self.assertEqual(stream._written, [CONTENT[START:]]) self.assertEqual(download.total_size, LEN) - def 
test_GetRange_w_total_size_partial_w_additional_headers(self): + def test_get_range_w_total_size_partial(self): from six.moves import http_client from gcloud._testing import _Monkey from gcloud.streaming import transfer as MUT @@ -628,11 +604,10 @@ def test_GetRange_w_total_size_partial_w_additional_headers(self): REQ_RANGE = 'bytes=0-%d' % (PARTIAL_LEN,) RESP_RANGE = 'bytes 0-%d/%d' % (PARTIAL_LEN, LEN,) request = _Request() - headers = {'foo': 'bar'} http = object() stream = _Stream() download = self._makeOne(stream, total_size=LEN) - download._Initialize(http, request.URL) + download._initialize(http, request.URL) info = {'content-range': RESP_RANGE} response = _makeResponse(http_client.OK, info, CONTENT[:PARTIAL_LEN]) @@ -642,15 +617,15 @@ def test_GetRange_w_total_size_partial_w_additional_headers(self): with _Monkey(MUT, Request=lambda url: request, make_api_request=requester): - download.GetRange(0, PARTIAL_LEN, additional_headers=headers) + download.get_range(0, PARTIAL_LEN) self.assertTrue(len(requester._requested), 1) self.assertTrue(requester._requested[0][0] is request) - self.assertEqual(request.headers, {'foo': 'bar', 'range': REQ_RANGE}) + self.assertEqual(request.headers, {'range': REQ_RANGE}) self.assertEqual(stream._written, [CONTENT[:PARTIAL_LEN]]) self.assertEqual(download.total_size, LEN) - def test_GetRange_w_empty_chunk(self): + def test_get_range_w_empty_chunk(self): from six.moves import http_client from gcloud._testing import _Monkey from gcloud.streaming import transfer as MUT @@ -665,7 +640,7 @@ def test_GetRange_w_empty_chunk(self): http = object() stream = _Stream() download = self._makeOne(stream, chunksize=CHUNK_SIZE) - download._Initialize(http, request.URL) + download._initialize(http, request.URL) info = {'content-range': RESP_RANGE} response = _makeResponse(http_client.OK, info) requester = _MakeRequest(response) @@ -674,7 +649,7 @@ def test_GetRange_w_empty_chunk(self): Request=lambda url: request, make_api_request=requester): with self.assertRaises(TransferRetryError): - download.GetRange(START) + download.get_range(START) self.assertTrue(len(requester._requested), 1) self.assertTrue(requester._requested[0][0] is request) @@ -682,7 +657,7 @@ def test_GetRange_w_empty_chunk(self): self.assertEqual(stream._written, ['']) self.assertEqual(download.total_size, LEN) - def test_GetRange_w_total_size_wo_use_chunks(self): + def test_get_range_w_total_size_wo_use_chunks(self): from six.moves import http_client from gcloud._testing import _Monkey from gcloud.streaming import transfer as MUT @@ -695,7 +670,7 @@ def test_GetRange_w_total_size_wo_use_chunks(self): http = object() stream = _Stream() download = self._makeOne(stream, total_size=LEN, chunksize=CHUNK_SIZE) - download._Initialize(http, request.URL) + download._initialize(http, request.URL) info = {'content-range': RESP_RANGE} response = _makeResponse(http_client.OK, info, CONTENT) requester = _MakeRequest(response) @@ -703,7 +678,7 @@ def test_GetRange_w_total_size_wo_use_chunks(self): with _Monkey(MUT, Request=lambda url: request, make_api_request=requester): - download.GetRange(0, use_chunks=False) + download.get_range(0, use_chunks=False) self.assertTrue(len(requester._requested), 1) self.assertTrue(requester._requested[0][0] is request) @@ -711,7 +686,7 @@ def test_GetRange_w_total_size_wo_use_chunks(self): self.assertEqual(stream._written, [CONTENT]) self.assertEqual(download.total_size, LEN) - def test_GetRange_w_multiple_chunks(self): + def test_get_range_w_multiple_chunks(self): from six.moves 
import http_client from gcloud._testing import _Monkey from gcloud.streaming import transfer as MUT @@ -727,7 +702,7 @@ def test_GetRange_w_multiple_chunks(self): http = object() stream = _Stream() download = self._makeOne(stream, chunksize=CHUNK_SIZE) - download._Initialize(http, request_1.URL) + download._initialize(http, request_1.URL) info_1 = {'content-range': RESP_RANGE_1} response_1 = _makeResponse(http_client.PARTIAL_CONTENT, info_1, CONTENT[:CHUNK_SIZE]) @@ -739,7 +714,7 @@ def test_GetRange_w_multiple_chunks(self): with _Monkey(MUT, Request=lambda url: _requests.pop(0), make_api_request=requester): - download.GetRange(0) + download.get_range(0) self.assertTrue(len(requester._requested), 2) self.assertTrue(requester._requested[0][0] is request_1) @@ -749,41 +724,14 @@ def test_GetRange_w_multiple_chunks(self): self.assertEqual(stream._written, [b'ABC', b'DE']) self.assertEqual(download.total_size, LEN) - def test_StreamInChunks_wo_additional_headers(self): - download = self._makeOne(_Stream()) - _called_with = [] - - def _stream_media(additional_headers=None, use_chunks=False): - _called_with.append((additional_headers, use_chunks)) - - download.StreamMedia = _stream_media - - download.StreamInChunks() - - self.assertEqual(_called_with, [(None, True)]) - - def test_StreamInChunks_w_additional_headers(self): - download = self._makeOne(_Stream()) - _called_with = [] - - def _stream_media(additional_headers=None, use_chunks=False): - _called_with.append((additional_headers, use_chunks)) - - download.StreamMedia = _stream_media - - headers = {'foo': 'bar'} - download.StreamInChunks(headers) - - self.assertEqual(_called_with, [(headers, True)]) - - def test_StreamMedia_not_initialized(self): + def test_stream_file_not_initialized(self): from gcloud.streaming.exceptions import TransferInvalidError download = self._makeOne(_Stream()) with self.assertRaises(TransferInvalidError): - found = download.StreamMedia() + found = download.stream_file() - def test_StreamMedia_w_initial_response_complete(self): + def test_stream_file_w_initial_response_complete(self): from six.moves import http_client CONTENT = b'ABCDEFGHIJ' LEN = len(CONTENT) @@ -794,14 +742,14 @@ def test_StreamMedia_w_initial_response_complete(self): download._initial_response = _makeResponse( http_client.OK, info, CONTENT) http = object() - download._Initialize(http, _Request.URL) + download._initialize(http, _Request.URL) - download.StreamMedia() + download.stream_file() self.assertEqual(stream._written, [CONTENT]) self.assertEqual(download.total_size, LEN) - def test_StreamMedia_w_initial_response_incomplete(self): + def test_stream_file_w_initial_response_incomplete(self): from six.moves import http_client from gcloud._testing import _Monkey from gcloud.streaming import transfer as MUT @@ -822,14 +770,14 @@ def test_StreamMedia_w_initial_response_incomplete(self): http_client.OK, info_2, CONTENT[CHUNK_SIZE:]) requester = _MakeRequest(response_2) - download._Initialize(http, _Request.URL) + download._initialize(http, _Request.URL) request = _Request() with _Monkey(MUT, Request=lambda url: request, make_api_request=requester): - download.StreamMedia() + download.stream_file() self.assertTrue(len(requester._requested), 1) self.assertTrue(requester._requested[0][0] is request) @@ -838,7 +786,7 @@ def test_StreamMedia_w_initial_response_incomplete(self): [CONTENT[:CHUNK_SIZE], CONTENT[CHUNK_SIZE:]]) self.assertEqual(download.total_size, LEN) - def test_StreamMedia_wo_initial_response_wo_total_size(self): + def 
test_stream_file_wo_initial_response_wo_total_size(self): from six.moves import http_client from gcloud._testing import _Monkey from gcloud.streaming import transfer as MUT @@ -853,14 +801,14 @@ def test_StreamMedia_wo_initial_response_wo_total_size(self): info = {'content-range': RESP_RANGE} response = _makeResponse(http_client.OK, info, CONTENT) requester = _MakeRequest(response) - download._Initialize(http, _Request.URL) + download._initialize(http, _Request.URL) request = _Request() with _Monkey(MUT, Request=lambda url: request, make_api_request=requester): - download.StreamMedia() + download.stream_file() self.assertTrue(len(requester._requested), 1) self.assertTrue(requester._requested[0][0] is request) @@ -868,37 +816,6 @@ def test_StreamMedia_wo_initial_response_wo_total_size(self): self.assertEqual(stream._written, [CONTENT]) self.assertEqual(download.total_size, LEN) - def test_StreamMedia_wo_initial_response_w_addl_headers_wo_chunks(self): - from six.moves import http_client - from gcloud._testing import _Monkey - from gcloud.streaming import transfer as MUT - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - CHUNK_SIZE = 123 - REQ_RANGE = 'bytes=0-' - RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,) - stream = _Stream() - http = object() - download = self._makeOne(stream, chunksize=CHUNK_SIZE) - info = {'content-range': RESP_RANGE} - response = _makeResponse(http_client.OK, info, CONTENT) - requester = _MakeRequest(response) - download._Initialize(http, _Request.URL) - - headers = {'foo': 'bar'} - request = _Request() - - with _Monkey(MUT, - Request=lambda url: request, - make_api_request=requester): - download.StreamMedia(additional_headers=headers, use_chunks=False) - - self.assertTrue(len(requester._requested), 1) - self.assertTrue(requester._requested[0][0] is request) - self.assertEqual(request.headers, {'foo': 'bar', 'range': REQ_RANGE}) - self.assertEqual(stream._written, [CONTENT]) - self.assertEqual(download.total_size, LEN) - class Test_Upload(unittest2.TestCase): URL = "http://example.com/api" @@ -932,14 +849,14 @@ def test_ctor_w_kwds(self): self.assertEqual(upload.mime_type, self.MIME_TYPE) self.assertEqual(upload.chunksize, CHUNK_SIZE) - def test_FromFile_w_nonesuch_file(self): + def test_from_file_w_nonesuch_file(self): from gcloud.streaming.exceptions import NotFoundError klass = self._getTargetClass() filename = '~nosuchuser/file.txt' with self.assertRaises(NotFoundError): - klass.FromFile(filename) + klass.from_file(filename) - def test_FromFile_wo_mimetype_w_unguessable_filename(self): + def test_from_file_wo_mimetype_w_unguessable_filename(self): import os from gcloud.streaming.exceptions import InvalidUserInputError klass = self._getTargetClass() @@ -949,9 +866,9 @@ def test_FromFile_wo_mimetype_w_unguessable_filename(self): with open(filename, 'wb') as fileobj: fileobj.write(CONTENT) with self.assertRaises(InvalidUserInputError): - klass.FromFile(filename) + klass.from_file(filename) - def test_FromFile_wo_mimetype_w_guessable_filename(self): + def test_from_file_wo_mimetype_w_guessable_filename(self): import os klass = self._getTargetClass() CONTENT = b'EXISTING FILE W/ GUESSABLE MIMETYPE' @@ -959,12 +876,12 @@ def test_FromFile_wo_mimetype_w_guessable_filename(self): filename = os.path.join(tempdir, 'file.txt') with open(filename, 'wb') as fileobj: fileobj.write(CONTENT) - upload = klass.FromFile(filename) + upload = klass.from_file(filename) self.assertEqual(upload.mime_type, 'text/plain') self.assertTrue(upload.auto_transfer) 
self.assertEqual(upload.total_size, len(CONTENT)) - def test_FromFile_w_mimetype_w_auto_transfer_w_kwds(self): + def test_from_file_w_mimetype_w_auto_transfer_w_kwds(self): import os klass = self._getTargetClass() CONTENT = b'EXISTING FILE W/ GUESSABLE MIMETYPE' @@ -973,7 +890,7 @@ def test_FromFile_w_mimetype_w_auto_transfer_w_kwds(self): filename = os.path.join(tempdir, 'file.unguessable') with open(filename, 'wb') as fileobj: fileobj.write(CONTENT) - upload = klass.FromFile( + upload = klass.from_file( filename, mime_type=self.MIME_TYPE, auto_transfer=False, @@ -983,27 +900,27 @@ def test_FromFile_w_mimetype_w_auto_transfer_w_kwds(self): self.assertEqual(upload.total_size, len(CONTENT)) self.assertEqual(upload.chunksize, CHUNK_SIZE) - def test_FromStream_wo_mimetype(self): + def test_from_stream_wo_mimetype(self): from gcloud.streaming.exceptions import InvalidUserInputError klass = self._getTargetClass() stream = _Stream() with self.assertRaises(InvalidUserInputError): - klass.FromStream(stream, mime_type=None) + klass.from_stream(stream, mime_type=None) - def test_FromStream_defaults(self): + def test_from_stream_defaults(self): klass = self._getTargetClass() stream = _Stream() - upload = klass.FromStream(stream, mime_type=self.MIME_TYPE) + upload = klass.from_stream(stream, mime_type=self.MIME_TYPE) self.assertEqual(upload.mime_type, self.MIME_TYPE) self.assertTrue(upload.auto_transfer) self.assertEqual(upload.total_size, None) - def test_FromStream_explicit(self): + def test_from_stream_explicit(self): klass = self._getTargetClass() stream = _Stream() SIZE = 10 CHUNK_SIZE = 3 - upload = klass.FromStream( + upload = klass.from_stream( stream, mime_type=self.MIME_TYPE, auto_transfer=False, @@ -1039,7 +956,7 @@ def test_total_size_setter_initialized(self): SIZE = 123 upload = self._makeOne(_Stream) http = object() - upload._Initialize(http, _Request.URL) + upload._initialize(http, _Request.URL) with self.assertRaises(TransferInvalidError): upload.total_size = SIZE @@ -1050,7 +967,7 @@ def test_total_size_setter_not_initialized(self): upload.total_size = SIZE self.assertEqual(upload.total_size, SIZE) - def test__SetDefaultUploadStrategy_w_existing_strategy(self): + def test__set_default_strategy_w_existing_strategy(self): from gcloud.streaming.transfer import RESUMABLE_UPLOAD config = _Dummy( resumable_path='/resumable/endpoint', @@ -1060,10 +977,10 @@ def test__SetDefaultUploadStrategy_w_existing_strategy(self): request = _Request() upload = self._makeOne(_Stream) upload.strategy = RESUMABLE_UPLOAD - upload._SetDefaultUploadStrategy(config, request) + upload._set_default_strategy(config, request) self.assertEqual(upload.strategy, RESUMABLE_UPLOAD) - def test__SetDefaultUploadStrategy_wo_resumable_path(self): + def test__set_default_strategy_wo_resumable_path(self): from gcloud.streaming.transfer import SIMPLE_UPLOAD config = _Dummy( resumable_path=None, @@ -1072,49 +989,49 @@ def test__SetDefaultUploadStrategy_wo_resumable_path(self): ) request = _Request() upload = self._makeOne(_Stream()) - upload._SetDefaultUploadStrategy(config, request) + upload._set_default_strategy(config, request) self.assertEqual(upload.strategy, SIMPLE_UPLOAD) - def test__SetDefaultUploadStrategy_w_total_size_gt_threshhold(self): + def test__set_default_strategy_w_total_size_gt_threshhold(self): from gcloud.streaming.transfer import RESUMABLE_UPLOAD_THRESHOLD from gcloud.streaming.transfer import RESUMABLE_UPLOAD config = _UploadConfig() request = _Request() upload = self._makeOne( _Stream(), 
total_size=RESUMABLE_UPLOAD_THRESHOLD + 1) - upload._SetDefaultUploadStrategy(config, request) + upload._set_default_strategy(config, request) self.assertEqual(upload.strategy, RESUMABLE_UPLOAD) - def test__SetDefaultUploadStrategy_w_body_wo_multipart(self): + def test__set_default_strategy_w_body_wo_multipart(self): from gcloud.streaming.transfer import RESUMABLE_UPLOAD CONTENT = b'ABCDEFGHIJ' config = _UploadConfig() config.simple_multipart = False request = _Request(body=CONTENT) upload = self._makeOne(_Stream(), total_size=len(CONTENT)) - upload._SetDefaultUploadStrategy(config, request) + upload._set_default_strategy(config, request) self.assertEqual(upload.strategy, RESUMABLE_UPLOAD) - def test__SetDefaultUploadStrategy_w_body_w_multipart_wo_simple_path(self): + def test__set_default_strategy_w_body_w_multipart_wo_simple_path(self): from gcloud.streaming.transfer import RESUMABLE_UPLOAD CONTENT = b'ABCDEFGHIJ' config = _UploadConfig() config.simple_path = None request = _Request(body=CONTENT) upload = self._makeOne(_Stream(), total_size=len(CONTENT)) - upload._SetDefaultUploadStrategy(config, request) + upload._set_default_strategy(config, request) self.assertEqual(upload.strategy, RESUMABLE_UPLOAD) - def test__SetDefaultUploadStrategy_w_body_w_multipart_w_simple_path(self): + def test__set_default_strategy_w_body_w_multipart_w_simple_path(self): from gcloud.streaming.transfer import SIMPLE_UPLOAD CONTENT = b'ABCDEFGHIJ' config = _UploadConfig() request = _Request(body=CONTENT) upload = self._makeOne(_Stream(), total_size=len(CONTENT)) - upload._SetDefaultUploadStrategy(config, request) + upload._set_default_strategy(config, request) self.assertEqual(upload.strategy, SIMPLE_UPLOAD) - def test_ConfigureRequest_w_total_size_gt_max_size(self): + def test_configure_request_w_total_size_gt_max_size(self): from gcloud.streaming.exceptions import InvalidUserInputError MAX_SIZE = 1000 config = _UploadConfig() @@ -1123,9 +1040,9 @@ def test_ConfigureRequest_w_total_size_gt_max_size(self): url_builder = _Dummy() upload = self._makeOne(_Stream(), total_size=MAX_SIZE + 1) with self.assertRaises(InvalidUserInputError): - upload.ConfigureRequest(config, request, url_builder) + upload.configure_request(config, request, url_builder) - def test_ConfigureRequest_w_invalid_mimetype(self): + def test_configure_request_w_invalid_mimetype(self): from gcloud.streaming.exceptions import InvalidUserInputError config = _UploadConfig() config.accept = ('text/*',) @@ -1133,9 +1050,9 @@ def test_ConfigureRequest_w_invalid_mimetype(self): url_builder = _Dummy() upload = self._makeOne(_Stream()) with self.assertRaises(InvalidUserInputError): - upload.ConfigureRequest(config, request, url_builder) + upload.configure_request(config, request, url_builder) - def test_ConfigureRequest_w_simple_wo_body(self): + def test_configure_request_w_simple_wo_body(self): from gcloud.streaming.transfer import SIMPLE_UPLOAD CONTENT = b'CONTENT' config = _UploadConfig() @@ -1144,7 +1061,7 @@ def test_ConfigureRequest_w_simple_wo_body(self): upload = self._makeOne(_Stream(CONTENT)) upload.strategy = SIMPLE_UPLOAD - upload.ConfigureRequest(config, request, url_builder) + upload.configure_request(config, request, url_builder) self.assertEqual(url_builder.query_params, {'uploadType': 'media'}) self.assertEqual(url_builder.relative_path, config.simple_path) @@ -1153,7 +1070,7 @@ def test_ConfigureRequest_w_simple_wo_body(self): self.assertEqual(request.body, CONTENT) self.assertEqual(request.loggable_body, '') - def 
test_ConfigureRequest_w_simple_w_body(self): + def test_configure_request_w_simple_w_body(self): from email.parser import Parser from gcloud.streaming.transfer import SIMPLE_UPLOAD CONTENT = b'CONTENT' @@ -1165,7 +1082,7 @@ def test_ConfigureRequest_w_simple_w_body(self): upload = self._makeOne(_Stream(CONTENT)) upload.strategy = SIMPLE_UPLOAD - upload.ConfigureRequest(config, request, url_builder) + upload.configure_request(config, request, url_builder) self.assertEqual(url_builder.query_params, {'uploadType': 'multipart'}) self.assertEqual(url_builder.relative_path, config.simple_path) @@ -1196,7 +1113,7 @@ def test_ConfigureRequest_w_simple_w_body(self): self.assertEqual(app_msg._payload, CONTENT.decode('ascii')) self.assertTrue('' in request.loggable_body) - def test_ConfigureRequest_w_resumable_wo_total_size(self): + def test_configure_request_w_resumable_wo_total_size(self): from gcloud.streaming.transfer import RESUMABLE_UPLOAD CONTENT = b'CONTENT' config = _UploadConfig() @@ -1205,7 +1122,7 @@ def test_ConfigureRequest_w_resumable_wo_total_size(self): upload = self._makeOne(_Stream(CONTENT)) upload.strategy = RESUMABLE_UPLOAD - upload.ConfigureRequest(config, request, url_builder) + upload.configure_request(config, request, url_builder) self.assertEqual(url_builder.query_params, {'uploadType': 'resumable'}) self.assertEqual(url_builder.relative_path, config.resumable_path) @@ -1213,7 +1130,7 @@ def test_ConfigureRequest_w_resumable_wo_total_size(self): self.assertEqual(request.headers, {'X-Upload-Content-Type': self.MIME_TYPE}) - def test_ConfigureRequest_w_resumable_w_total_size(self): + def test_configure_request_w_resumable_w_total_size(self): from gcloud.streaming.transfer import RESUMABLE_UPLOAD CONTENT = b'CONTENT' LEN = len(CONTENT) @@ -1224,7 +1141,7 @@ def test_ConfigureRequest_w_resumable_w_total_size(self): upload.total_size = LEN upload.strategy = RESUMABLE_UPLOAD - upload.ConfigureRequest(config, request, url_builder) + upload.configure_request(config, request, url_builder) self.assertEqual(url_builder.query_params, {'uploadType': 'resumable'}) self.assertEqual(url_builder.relative_path, config.resumable_path) @@ -1233,21 +1150,21 @@ def test_ConfigureRequest_w_resumable_w_total_size(self): {'X-Upload-Content-Type': self.MIME_TYPE, 'X-Upload-Content-Length': '%d' % (LEN,)}) - def test_RefreshResumableUploadState_w_simple_strategy(self): + def test_refresh_upload_state_w_simple_strategy(self): from gcloud.streaming.transfer import SIMPLE_UPLOAD upload = self._makeOne(_Stream()) upload.strategy = SIMPLE_UPLOAD - upload.RefreshResumableUploadState() # no-op + upload.refresh_upload_state() # no-op - def test_RefreshResumableUploadState_not_initialized(self): + def test_refresh_upload_state_not_initialized(self): from gcloud.streaming.exceptions import TransferInvalidError from gcloud.streaming.transfer import RESUMABLE_UPLOAD upload = self._makeOne(_Stream()) upload.strategy = RESUMABLE_UPLOAD with self.assertRaises(TransferInvalidError): - upload.RefreshResumableUploadState() + upload.refresh_upload_state() - def test_RefreshResumableUploadState_w_OK(self): + def test_refresh_upload_state_w_OK(self): from six.moves import http_client from gcloud._testing import _Monkey from gcloud.streaming import transfer as MUT @@ -1259,7 +1176,7 @@ def test_RefreshResumableUploadState_w_OK(self): stream = _Stream() upload = self._makeOne(stream, total_size=LEN) upload.strategy = RESUMABLE_UPLOAD - upload._Initialize(http, _Request.URL) + upload._initialize(http, _Request.URL) info = 
{'content-range': RESP_RANGE} response = _makeResponse(http_client.OK, info, CONTENT) requester = _MakeRequest(response) @@ -1267,14 +1184,14 @@ def test_RefreshResumableUploadState_w_OK(self): with _Monkey(MUT, Request=_Request, make_api_request=requester): - upload.RefreshResumableUploadState() + upload.refresh_upload_state() self.assertTrue(upload.complete) self.assertEqual(upload.progress, LEN) self.assertEqual(stream.tell(), LEN) self.assertTrue(upload._final_response is response) - def test_RefreshResumableUploadState_w_CREATED(self): + def test_refresh_upload_state_w_CREATED(self): from six.moves import http_client from gcloud._testing import _Monkey from gcloud.streaming import transfer as MUT @@ -1286,7 +1203,7 @@ def test_RefreshResumableUploadState_w_CREATED(self): stream = _Stream() upload = self._makeOne(stream, total_size=LEN) upload.strategy = RESUMABLE_UPLOAD - upload._Initialize(http, _Request.URL) + upload._initialize(http, _Request.URL) info = {'content-range': RESP_RANGE} response = _makeResponse(http_client.CREATED, info, CONTENT) requester = _MakeRequest(response) @@ -1294,14 +1211,14 @@ def test_RefreshResumableUploadState_w_CREATED(self): with _Monkey(MUT, Request=_Request, make_api_request=requester): - upload.RefreshResumableUploadState() + upload.refresh_upload_state() self.assertTrue(upload.complete) self.assertEqual(upload.progress, LEN) self.assertEqual(stream.tell(), LEN) self.assertTrue(upload._final_response is response) - def test_RefreshResumableUploadState_w_RESUME_INCOMPLETE_w_range(self): + def test_refresh_upload_state_w_RESUME_INCOMPLETE_w_range(self): from gcloud.streaming import transfer as MUT from gcloud.streaming.http_wrapper import RESUME_INCOMPLETE from gcloud._testing import _Monkey @@ -1313,7 +1230,7 @@ def test_RefreshResumableUploadState_w_RESUME_INCOMPLETE_w_range(self): stream = _Stream() upload = self._makeOne(stream, total_size=LEN) upload.strategy = RESUMABLE_UPLOAD - upload._Initialize(http, _Request.URL) + upload._initialize(http, _Request.URL) info = {'range': '0-%d' % (LAST - 1,)} response = _makeResponse(RESUME_INCOMPLETE, info, CONTENT) requester = _MakeRequest(response) @@ -1321,14 +1238,14 @@ def test_RefreshResumableUploadState_w_RESUME_INCOMPLETE_w_range(self): with _Monkey(MUT, Request=_Request, make_api_request=requester): - upload.RefreshResumableUploadState() + upload.refresh_upload_state() self.assertFalse(upload.complete) self.assertEqual(upload.progress, LAST) self.assertEqual(stream.tell(), LAST) self.assertFalse(upload._final_response is response) - def test_RefreshResumableUploadState_w_RESUME_INCOMPLETE_wo_range(self): + def test_refresh_upload_state_w_RESUME_INCOMPLETE_wo_range(self): from gcloud.streaming import transfer as MUT from gcloud.streaming.http_wrapper import RESUME_INCOMPLETE from gcloud._testing import _Monkey @@ -1340,21 +1257,21 @@ def test_RefreshResumableUploadState_w_RESUME_INCOMPLETE_wo_range(self): stream = _Stream() upload = self._makeOne(stream, total_size=LEN) upload.strategy = RESUMABLE_UPLOAD - upload._Initialize(http, _Request.URL) + upload._initialize(http, _Request.URL) response = _makeResponse(RESUME_INCOMPLETE, content=CONTENT) requester = _MakeRequest(response) with _Monkey(MUT, Request=_Request, make_api_request=requester): - upload.RefreshResumableUploadState() + upload.refresh_upload_state() self.assertFalse(upload.complete) self.assertEqual(upload.progress, 0) self.assertEqual(stream.tell(), 0) self.assertFalse(upload._final_response is response) - def 
test_RefreshResumableUploadState_w_error(self): + def test_refresh_upload_state_w_error(self): from six.moves import http_client from gcloud._testing import _Monkey from gcloud.streaming import transfer as MUT @@ -1366,7 +1283,7 @@ def test_RefreshResumableUploadState_w_error(self): stream = _Stream() upload = self._makeOne(stream, total_size=LEN) upload.strategy = RESUMABLE_UPLOAD - upload._Initialize(http, _Request.URL) + upload._initialize(http, _Request.URL) response = _makeResponse(http_client.FORBIDDEN) requester = _MakeRequest(response) @@ -1374,57 +1291,57 @@ def test_RefreshResumableUploadState_w_error(self): Request=_Request, make_api_request=requester): with self.assertRaises(HttpError): - upload.RefreshResumableUploadState() + upload.refresh_upload_state() - def test__GetRangeHeaderFromResponse_miss(self): + def test__get_range_header_miss(self): upload = self._makeOne(_Stream()) response = _makeResponse(None) - self.assertTrue(upload._GetRangeHeaderFromResponse(response) is None) + self.assertTrue(upload._get_range_header(response) is None) - def test__GetRangeHeaderFromResponse_w_Range(self): + def test__get_range_header_w_Range(self): upload = self._makeOne(_Stream()) response = _makeResponse(None, {'Range': '123'}) - self.assertEqual(upload._GetRangeHeaderFromResponse(response), '123') + self.assertEqual(upload._get_range_header(response), '123') - def test__GetRangeHeaderFromResponse_w_range(self): + def test__get_range_header_w_range(self): upload = self._makeOne(_Stream()) response = _makeResponse(None, {'range': '123'}) - self.assertEqual(upload._GetRangeHeaderFromResponse(response), '123') + self.assertEqual(upload._get_range_header(response), '123') - def test_InitializeUpload_no_strategy(self): + def test_initialize_upload_no_strategy(self): from gcloud.streaming.exceptions import UserError request = _Request() upload = self._makeOne(_Stream()) with self.assertRaises(UserError): - upload.InitializeUpload(request, http=object()) + upload.initialize_upload(request, http=object()) - def test_InitializeUpload_wo_client_wo_http(self): + def test_initialize_upload_wo_client_wo_http(self): from gcloud.streaming.exceptions import UserError from gcloud.streaming.transfer import SIMPLE_UPLOAD request = _Request() upload = self._makeOne(_Stream()) upload.strategy = SIMPLE_UPLOAD with self.assertRaises(UserError): - upload.InitializeUpload(request) + upload.initialize_upload(request) - def test_InitializeUpload_simple_w_http(self): + def test_initialize_upload_simple_w_http(self): from gcloud.streaming.transfer import SIMPLE_UPLOAD request = _Request() upload = self._makeOne(_Stream()) upload.strategy = SIMPLE_UPLOAD - upload.InitializeUpload(request, http=object()) # no-op + upload.initialize_upload(request, http=object()) # no-op - def test_InitializeUpload_resumable_already_initialized(self): + def test_initialize_upload_resumable_already_initialized(self): from gcloud.streaming.exceptions import TransferInvalidError from gcloud.streaming.transfer import RESUMABLE_UPLOAD request = _Request() upload = self._makeOne(_Stream()) upload.strategy = RESUMABLE_UPLOAD - upload._Initialize(None, self.URL) + upload._initialize(None, self.URL) with self.assertRaises(TransferInvalidError): - upload.InitializeUpload(request, http=object()) + upload.initialize_upload(request, http=object()) - def test_InitializeUpload_w_http_resumable_not_initialized_w_error(self): + def test_initialize_upload_w_http_resumable_not_initialized_w_error(self): from six.moves import http_client from 
gcloud._testing import _Monkey from gcloud.streaming import transfer as MUT @@ -1440,9 +1357,9 @@ def test_InitializeUpload_w_http_resumable_not_initialized_w_error(self): with _Monkey(MUT, make_api_request=requester): with self.assertRaises(HttpError): - upload.InitializeUpload(request, http=object()) + upload.initialize_upload(request, http=object()) - def test_InitializeUpload_w_http_wo_auto_transfer_w_OK(self): + def test_initialize_upload_w_http_wo_auto_transfer_w_OK(self): from six.moves import http_client from gcloud._testing import _Monkey from gcloud.streaming import transfer as MUT @@ -1457,7 +1374,7 @@ def test_InitializeUpload_w_http_wo_auto_transfer_w_OK(self): requester = _MakeRequest(response) with _Monkey(MUT, make_api_request=requester): - upload.InitializeUpload(request, http=object()) + upload.initialize_upload(request, http=object()) self.assertEqual(upload._server_chunk_granularity, None) self.assertEqual(upload.url, self.UPLOAD_URL) @@ -1465,7 +1382,7 @@ def test_InitializeUpload_w_http_wo_auto_transfer_w_OK(self): self.assertEqual(len(requester._requested), 1) self.assertTrue(requester._requested[0][0] is request) - def test_InitializeUpload_w_client_w_auto_transfer_w_OK(self): + def test_initialize_upload_w_client_w_auto_transfer_w_OK(self): from six.moves import http_client from gcloud._testing import _Monkey from gcloud.streaming import transfer as MUT @@ -1488,7 +1405,7 @@ def test_InitializeUpload_w_client_w_auto_transfer_w_OK(self): Request=lambda url, http_method, body: _Request(url, http_method, body), make_api_request=requester): - upload.InitializeUpload(request, client=client) + upload.initialize_upload(request, client=client) self.assertEqual(upload._server_chunk_granularity, 100) self.assertEqual(upload.url, FINALIZED_URL) @@ -1501,53 +1418,53 @@ def test_InitializeUpload_w_client_w_auto_transfer_w_OK(self): self.assertEqual(chunk_request.http_method, 'PUT') self.assertEqual(chunk_request.body, CONTENT) - def test__GetLastByte(self): + def test__last_byte(self): upload = self._makeOne(_Stream()) - self.assertEqual(upload._GetLastByte('123-456'), 456) + self.assertEqual(upload._last_byte('123-456'), 456) - def test__ValidateChunkSize_wo__server_chunk_granularity(self): + def test__validate_chunksize_wo__server_chunk_granularity(self): upload = self._makeOne(_Stream()) - upload._ValidateChunksize(123) # no-op + upload._validate_chunksize(123) # no-op - def test__ValidateChunkSize_w__server_chunk_granularity_miss(self): + def test__validate_chunksize_w__server_chunk_granularity_miss(self): from gcloud.streaming.exceptions import ConfigurationValueError upload = self._makeOne(_Stream()) upload._server_chunk_granularity = 100 with self.assertRaises(ConfigurationValueError): - upload._ValidateChunksize(123) + upload._validate_chunksize(123) - def test__ValidateChunkSize_w__server_chunk_granularity_hit(self): + def test__validate_chunksize_w__server_chunk_granularity_hit(self): upload = self._makeOne(_Stream()) upload._server_chunk_granularity = 100 - upload._ValidateChunksize(400) + upload._validate_chunksize(400) - def test__StreamMedia_w_simple_strategy(self): + def test_stream_file_w_simple_strategy(self): from gcloud.streaming.exceptions import InvalidUserInputError from gcloud.streaming.transfer import SIMPLE_UPLOAD upload = self._makeOne(_Stream()) upload.strategy = SIMPLE_UPLOAD with self.assertRaises(InvalidUserInputError): - upload._StreamMedia() + upload.stream_file() - def test__StreamMedia_w_use_chunks_invalid_chunk_size(self): + def 
test_stream_file_w_use_chunks_invalid_chunk_size(self): from gcloud.streaming.exceptions import ConfigurationValueError from gcloud.streaming.transfer import RESUMABLE_UPLOAD upload = self._makeOne(_Stream(), chunksize=1024) upload.strategy = RESUMABLE_UPLOAD upload._server_chunk_granularity = 100 with self.assertRaises(ConfigurationValueError): - upload._StreamMedia(use_chunks=True) + upload.stream_file(use_chunks=True) - def test__StreamMedia_not_initialized(self): + def test_stream_file_not_initialized(self): from gcloud.streaming.exceptions import TransferInvalidError from gcloud.streaming.transfer import RESUMABLE_UPLOAD upload = self._makeOne(_Stream(), chunksize=1024) upload.strategy = RESUMABLE_UPLOAD upload._server_chunk_granularity = 128 with self.assertRaises(TransferInvalidError): - upload._StreamMedia() + upload.stream_file() - def test__StreamMedia_already_complete_w_unseekable_stream(self): + def test_stream_file_already_complete_w_unseekable_stream(self): from gcloud.streaming.transfer import RESUMABLE_UPLOAD http = object() stream = object() @@ -1555,12 +1472,12 @@ def test__StreamMedia_already_complete_w_unseekable_stream(self): upload = self._makeOne(stream, chunksize=1024) upload.strategy = RESUMABLE_UPLOAD upload._server_chunk_granularity = 128 - upload._Initialize(http, _Request.URL) + upload._initialize(http, _Request.URL) upload._final_response = response upload._complete = True - self.assertTrue(upload._StreamMedia() is response) + self.assertTrue(upload.stream_file() is response) - def test__StreamMedia_already_complete_w_seekable_stream_unsynced(self): + def test_stream_file_already_complete_w_seekable_stream_unsynced(self): from gcloud.streaming.exceptions import CommunicationError from gcloud.streaming.transfer import RESUMABLE_UPLOAD CONTENT = b'ABCDEFGHIJ' @@ -1570,13 +1487,13 @@ def test__StreamMedia_already_complete_w_seekable_stream_unsynced(self): upload = self._makeOne(stream, chunksize=1024) upload.strategy = RESUMABLE_UPLOAD upload._server_chunk_granularity = 128 - upload._Initialize(http, _Request.URL) + upload._initialize(http, _Request.URL) upload._final_response = response upload._complete = True with self.assertRaises(CommunicationError): - upload._StreamMedia() + upload.stream_file() - def test__StreamMedia_already_complete_w_seekable_stream_synced(self): + def test_stream_file_already_complete_w_seekable_stream_synced(self): import os from gcloud.streaming.transfer import RESUMABLE_UPLOAD CONTENT = b'ABCDEFGHIJ' @@ -1587,12 +1504,12 @@ def test__StreamMedia_already_complete_w_seekable_stream_synced(self): upload = self._makeOne(stream, chunksize=1024) upload.strategy = RESUMABLE_UPLOAD upload._server_chunk_granularity = 128 - upload._Initialize(http, _Request.URL) + upload._initialize(http, _Request.URL) upload._final_response = response upload._complete = True - self.assertTrue(upload._StreamMedia(use_chunks=False) is response) + self.assertTrue(upload.stream_file(use_chunks=False) is response) - def test__StreamMedia_incomplete(self): + def test_stream_file_incomplete(self): from six.moves import http_client from gcloud._testing import _Monkey from gcloud.streaming import transfer as MUT @@ -1604,7 +1521,7 @@ def test__StreamMedia_incomplete(self): upload = self._makeOne(stream, chunksize=6) upload.strategy = RESUMABLE_UPLOAD upload._server_chunk_granularity = 6 - upload._Initialize(http, self.UPLOAD_URL) + upload._initialize(http, self.UPLOAD_URL) info_1 = {'content-length': '0', 'range': 'bytes=0-5'} response_1 = 
_makeResponse(RESUME_INCOMPLETE, info_1) @@ -1616,7 +1533,7 @@ def test__StreamMedia_incomplete(self): Request=lambda url, http_method, body: _Request(url, http_method, body), make_api_request=requester): - response = upload._StreamMedia() + response = upload.stream_file() self.assertEqual(len(requester._responses), 0) self.assertEqual(len(requester._requested), 2) @@ -1637,7 +1554,7 @@ def test__StreamMedia_incomplete(self): 'Content-Type': self.MIME_TYPE}) self.assertEqual(request_2.body, CONTENT[6:]) - def test__StreamMedia_incomplete_w_transfer_error_w_addl_headers(self): + def test_stream_file_incomplete_w_transfer_error(self): from gcloud._testing import _Monkey from gcloud.streaming import transfer as MUT from gcloud.streaming.exceptions import CommunicationError @@ -1646,11 +1563,10 @@ def test__StreamMedia_incomplete_w_transfer_error_w_addl_headers(self): CONTENT = b'ABCDEFGHIJ' http = object() stream = _Stream(CONTENT) - headers = {'foo': 'bar'} upload = self._makeOne(stream, chunksize=6) upload.strategy = RESUMABLE_UPLOAD upload._server_chunk_granularity = 6 - upload._Initialize(http, self.UPLOAD_URL) + upload._initialize(http, self.UPLOAD_URL) info = {'content-length': '0', 'range': 'bytes=0-4', # simulate error, s.b. '0-5' @@ -1663,7 +1579,7 @@ def test__StreamMedia_incomplete_w_transfer_error_w_addl_headers(self): _Request(url, http_method, body), make_api_request=requester): with self.assertRaises(CommunicationError): - upload._StreamMedia(additional_headers=headers) + upload.stream_file() self.assertEqual(len(requester._responses), 0) self.assertEqual(len(requester._requested), 1) @@ -1673,41 +1589,10 @@ def test__StreamMedia_incomplete_w_transfer_error_w_addl_headers(self): self.assertEqual(request.http_method, 'PUT') self.assertEqual(request.headers, {'Content-Range': 'bytes 0-5/*', - 'Content-Type': self.MIME_TYPE, - 'foo': 'bar'}) + 'Content-Type': self.MIME_TYPE}) self.assertEqual(request.body, CONTENT[:6]) - def test_StreamMedia_defaults(self): - upload = self._makeOne(_Stream()) - response = object() - upload._StreamMedia = streamer = _MediaStreamer(response) - self.assertTrue(upload.StreamMedia() is response) - self.assertEqual(streamer._called_with, (None, False)) - - def test_StreamMedia_explicit(self): - upload = self._makeOne(_Stream()) - response = object() - headers = {'foo': 'bar'} - upload._StreamMedia = streamer = _MediaStreamer(response) - self.assertTrue(upload.StreamMedia(headers) is response) - self.assertEqual(streamer._called_with, (headers, False)) - - def test_StreamInChunks_defaults(self): - upload = self._makeOne(_Stream()) - response = object() - upload._StreamMedia = streamer = _MediaStreamer(response) - self.assertTrue(upload.StreamInChunks() is response) - self.assertEqual(streamer._called_with, (None, True)) - - def test_StreamInChunks_explicit(self): - upload = self._makeOne(_Stream()) - response = object() - headers = {'foo': 'bar'} - upload._StreamMedia = streamer = _MediaStreamer(response) - self.assertTrue(upload.StreamInChunks(headers) is response) - self.assertEqual(streamer._called_with, (headers, True)) - - def test__SendMediaRequest_wo_error(self): + def test__send_media_request_wo_error(self): from gcloud._testing import _Monkey from gcloud.streaming import transfer as MUT from gcloud.streaming.http_wrapper import RESUME_INCOMPLETE @@ -1725,7 +1610,7 @@ def test__SendMediaRequest_wo_error(self): requester = _MakeRequest(response) with _Monkey(MUT, make_api_request=requester): - upload._SendMediaRequest(request, 9) + 
upload._send_media_request(request, 9) self.assertEqual(len(requester._responses), 0) self.assertEqual(len(requester._requested), 1) @@ -1734,7 +1619,7 @@ def test__SendMediaRequest_wo_error(self): self.assertTrue(used_http is bytes_http) self.assertEqual(stream.tell(), 4) - def test__SendMediaRequest_w_error(self): + def test__send_media_request_w_error(self): from six.moves import http_client from gcloud._testing import _Monkey from gcloud.streaming import transfer as MUT @@ -1747,7 +1632,7 @@ def test__SendMediaRequest_w_error(self): stream = _Stream(CONTENT) upload = self._makeOne(stream) upload.strategy = RESUMABLE_UPLOAD - upload._Initialize(http, self.UPLOAD_URL) + upload._initialize(http, self.UPLOAD_URL) upload.bytes_http = bytes_http headers = {'Content-Range': 'bytes 0-9/10', @@ -1763,7 +1648,7 @@ def test__SendMediaRequest_w_error(self): Request=_Request, make_api_request=requester): with self.assertRaises(HttpError): - upload._SendMediaRequest(request, 9) + upload._send_media_request(request, 9) self.assertEqual(len(requester._responses), 0) self.assertEqual(len(requester._requested), 2) @@ -1777,28 +1662,27 @@ def test__SendMediaRequest_w_error(self): {'Content-Range': 'bytes */*'}) self.assertTrue(second_http is http) - def test__SendMediaBody_not_initialized(self): + def test__send_media_body_not_initialized(self): from gcloud.streaming.exceptions import TransferInvalidError upload = self._makeOne(_Stream()) with self.assertRaises(TransferInvalidError): - upload._SendMediaBody(0) + upload._send_media_body(0) - def test__SendMediaBody_wo_total_size(self): + def test__send_media_body_wo_total_size(self): from gcloud.streaming.exceptions import TransferInvalidError http = object() upload = self._makeOne(_Stream()) - upload._Initialize(http, _Request.URL) + upload._initialize(http, _Request.URL) with self.assertRaises(TransferInvalidError): - upload._SendMediaBody(0) + upload._send_media_body(0) - def test__SendMediaBody_wo_additional_headers_start_lt_total_size(self): + def test__send_media_body_start_lt_total_size(self): from gcloud.streaming.stream_slice import StreamSlice SIZE = 1234 http = object() stream = _Stream() upload = self._makeOne(stream, total_size=SIZE) - upload._Initialize(http, self.UPLOAD_URL) - headers = {'foo': 'bar'} + upload._initialize(http, self.UPLOAD_URL) _called_with = [] response = object() @@ -1806,9 +1690,9 @@ def _send_media_request(request, end): _called_with.append((request, end)) return response - upload._SendMediaRequest = _send_media_request + upload._send_media_request = _send_media_request - found = upload._SendMediaBody(0) + found = upload._send_media_body(0) self.assertTrue(found is response) self.assertEqual(len(_called_with), 1) @@ -1825,14 +1709,13 @@ def _send_media_request(request, end): 'Content-Range': 'bytes 0-%d/%d' % (SIZE - 1, SIZE)}) self.assertEqual(end, SIZE) - def test__SendMediaBody_w_additional_headers_start_eq_total_size(self): + def test__send_media_body_start_eq_total_size(self): from gcloud.streaming.stream_slice import StreamSlice SIZE = 1234 http = object() stream = _Stream() upload = self._makeOne(stream, total_size=SIZE) - upload._Initialize(http, self.UPLOAD_URL) - headers = {'foo': 'bar'} + upload._initialize(http, self.UPLOAD_URL) _called_with = [] response = object() @@ -1840,9 +1723,9 @@ def _send_media_request(request, end): _called_with.append((request, end)) return response - upload._SendMediaRequest = _send_media_request + upload._send_media_request = _send_media_request - found = 
upload._SendMediaBody(SIZE, additional_headers=headers) + found = upload._send_media_body(SIZE) self.assertTrue(found is response) self.assertEqual(len(_called_with), 1) @@ -1855,23 +1738,22 @@ def _send_media_request(request, end): self.assertEqual(len(body_stream), 0) self.assertEqual(request.headers, {'content-length': '0', # speling! - 'foo': 'bar', 'Content-Type': self.MIME_TYPE, 'Content-Range': 'bytes */%d' % (SIZE,)}) self.assertEqual(end, SIZE) - def test__SendChunk_not_initialized(self): + def test__send_chunk_not_initialized(self): from gcloud.streaming.exceptions import TransferInvalidError upload = self._makeOne(_Stream()) with self.assertRaises(TransferInvalidError): - upload._SendChunk(0) + upload._send_chunk(0) - def test__SendChunk_wo_total_size_stream_exhausted(self): + def test__send_chunk_wo_total_size_stream_exhausted(self): CONTENT = b'ABCDEFGHIJ' SIZE = len(CONTENT) http = object() upload = self._makeOne(_Stream(CONTENT), chunksize=1000) - upload._Initialize(http, self.UPLOAD_URL) + upload._initialize(http, self.UPLOAD_URL) _called_with = [] response = object() @@ -1879,10 +1761,10 @@ def _send_media_request(request, end): _called_with.append((request, end)) return response - upload._SendMediaRequest = _send_media_request + upload._send_media_request = _send_media_request self.assertEqual(upload.total_size, None) - found = upload._SendChunk(0) + found = upload._send_chunk(0) self.assertTrue(found is response) self.assertEqual(upload.total_size, SIZE) @@ -1897,13 +1779,13 @@ def _send_media_request(request, end): 'Content-Range': 'bytes 0-%d/%d' % (SIZE - 1, SIZE)}) self.assertEqual(end, SIZE) - def test__SendChunk_wo_total_size_stream_not_exhausted(self): + def test__send_chunk_wo_total_size_stream_not_exhausted(self): CONTENT = b'ABCDEFGHIJ' SIZE = len(CONTENT) CHUNK_SIZE = SIZE - 5 http = object() upload = self._makeOne(_Stream(CONTENT), chunksize=CHUNK_SIZE) - upload._Initialize(http, self.UPLOAD_URL) + upload._initialize(http, self.UPLOAD_URL) _called_with = [] response = object() @@ -1911,10 +1793,10 @@ def _send_media_request(request, end): _called_with.append((request, end)) return response - upload._SendMediaRequest = _send_media_request + upload._send_media_request = _send_media_request self.assertEqual(upload.total_size, None) - found = upload._SendChunk(0) + found = upload._send_chunk(0) self.assertTrue(found is response) self.assertEqual(upload.total_size, None) @@ -1929,7 +1811,7 @@ def _send_media_request(request, end): 'Content-Range': 'bytes 0-%d/*' % (CHUNK_SIZE- 1,)}) self.assertEqual(end, CHUNK_SIZE) - def test__SendChunk_w_total_size_stream_not_exhausted(self): + def test__send_chunk_w_total_size_stream_not_exhausted(self): from gcloud.streaming.stream_slice import StreamSlice CONTENT = b'ABCDEFGHIJ' SIZE = len(CONTENT) @@ -1937,7 +1819,7 @@ def test__SendChunk_w_total_size_stream_not_exhausted(self): http = object() stream = _Stream(CONTENT) upload = self._makeOne(stream, total_size=SIZE, chunksize=CHUNK_SIZE) - upload._Initialize(http, self.UPLOAD_URL) + upload._initialize(http, self.UPLOAD_URL) _called_with = [] response = object() @@ -1945,8 +1827,8 @@ def _send_media_request(request, end): _called_with.append((request, end)) return response - upload._SendMediaRequest = _send_media_request - found = upload._SendChunk(0) + upload._send_media_request = _send_media_request + found = upload._send_chunk(0) self.assertTrue(found is response) request, end = _called_with[0] @@ -1963,16 +1845,15 @@ def _send_media_request(request, end): % (CHUNK_SIZE- 
1, SIZE)}) self.assertEqual(end, CHUNK_SIZE) - def test__SendChunk_w_total_size_stream_exhausted_w_addl_headers(self): + def test__send_chunk_w_total_size_stream_exhausted(self): from gcloud.streaming.stream_slice import StreamSlice CONTENT = b'ABCDEFGHIJ' SIZE = len(CONTENT) CHUNK_SIZE = 1000 http = object() - headers = {'foo': 'bar'} stream = _Stream(CONTENT) upload = self._makeOne(stream, total_size=SIZE, chunksize=CHUNK_SIZE) - upload._Initialize(http, self.UPLOAD_URL) + upload._initialize(http, self.UPLOAD_URL) _called_with = [] response = object() @@ -1980,9 +1861,9 @@ def _send_media_request(request, end): _called_with.append((request, end)) return response - upload._SendMediaRequest = _send_media_request + upload._send_media_request = _send_media_request - found = upload._SendChunk(SIZE, additional_headers=headers) + found = upload._send_chunk(SIZE) self.assertTrue(found is response) self.assertEqual(len(_called_with), 1) @@ -1995,7 +1876,6 @@ def _send_media_request(request, end): self.assertEqual(len(body_stream), 0) self.assertEqual(request.headers, {'content-length': '0', # speling! - 'foo': 'bar', 'Content-Type': self.MIME_TYPE, 'Content-Range': 'bytes */%d' % (SIZE,)}) self.assertEqual(end, SIZE) @@ -2092,9 +1972,9 @@ class _MediaStreamer(object): def __init__(self, response): self._response = response - def __call__(self, additional_headers=None, use_chunks=True): + def __call__(self, use_chunks=True): assert self._called_with is None - self._called_with = (additional_headers, use_chunks) + self._called_with = use_chunks return self._response diff --git a/gcloud/streaming/transfer.py b/gcloud/streaming/transfer.py index 45f4888cdec2..e2f53330ce79 100644 --- a/gcloud/streaming/transfer.py +++ b/gcloud/streaming/transfer.py @@ -112,7 +112,7 @@ def stream(self): def url(self): return self.__url - def _Initialize(self, http, url): + def _initialize(self, http, url): """Initialize this download by setting self.http and self.url. We want the user to be able to override self.http by having set @@ -126,7 +126,7 @@ def _Initialize(self, http, url): Returns: None. Initializes self. 
""" - self.EnsureUninitialized() + self._ensure_uninitialized() if self.http is None: self.__http = http or get_http() self.__url = url @@ -135,19 +135,15 @@ def _Initialize(self, http, url): def initialized(self): return self.url is not None and self.http is not None - @property - def _type_name(self): - return type(self).__name__ - - def EnsureInitialized(self): + def _ensure_initialized(self): if not self.initialized: raise TransferInvalidError( - 'Cannot use uninitialized %s', self._type_name) + 'Cannot use uninitialized %s', type(self).__name__) - def EnsureUninitialized(self): + def _ensure_uninitialized(self): if self.initialized: raise TransferInvalidError( - 'Cannot re-initialize %s', self._type_name) + 'Cannot re-initialize %s', type(self).__name__) def __del__(self): if self.__close_stream: @@ -177,7 +173,7 @@ def __init__(self, stream, **kwds): self.__encoding = None @classmethod - def FromFile(cls, filename, overwrite=False, auto_transfer=True, **kwds): + def from_file(cls, filename, overwrite=False, auto_transfer=True, **kwds): """Create a new download object from a filename.""" path = os.path.expanduser(filename) if os.path.exists(path) and not overwrite: @@ -187,7 +183,7 @@ def FromFile(cls, filename, overwrite=False, auto_transfer=True, **kwds): auto_transfer=auto_transfer, **kwds) @classmethod - def FromStream(cls, stream, auto_transfer=True, total_size=None, **kwds): + def from_stream(cls, stream, auto_transfer=True, total_size=None, **kwds): """Create a new Download object from a stream.""" return cls(stream, auto_transfer=auto_transfer, total_size=total_size, **kwds) @@ -211,7 +207,7 @@ def __repr__(self): return 'Download with %d/%s bytes transferred from url %s' % ( self.progress, self.total_size, self.url) - def ConfigureRequest(self, http_request, url_builder): + def configure_request(self, http_request, url_builder): url_builder.query_params['alt'] = 'media' # TODO(craigcitro): We need to send range requests because by # default httplib2 stores entire reponses in memory. Override @@ -219,7 +215,7 @@ def ConfigureRequest(self, http_request, url_builder): # necessary. http_request.headers['Range'] = 'bytes=0-%d' % (self.chunksize - 1,) - def _SetTotal(self, info): + def _set_total(self, info): if 'content-range' in info: _, _, total = info['content-range'].rpartition('/') if total != '*': @@ -231,7 +227,7 @@ def _SetTotal(self, info): if self.total_size is None: self.__total_size = 0 - def InitializeDownload(self, http_request, http=None, client=None): + def initialize_download(self, http_request, http=None, client=None): """Initialize this download by making a request. Args: @@ -241,7 +237,7 @@ def InitializeDownload(self, http_request, http=None, client=None): sending any additional requests. If client is provided and http is not, client.http will be used instead. 
""" - self.EnsureUninitialized() + self._ensure_uninitialized() if http is None and client is None: raise UserError('Must provide client or http.') http = http or client.http @@ -249,24 +245,24 @@ def InitializeDownload(self, http_request, http=None, client=None): http_request.url = client.FinalizeTransferUrl(http_request.url) url = http_request.url if self.auto_transfer: - end_byte = self._ComputeEndByte(0) - self._SetRangeHeader(http_request, 0, end_byte) + end_byte = self._compute_end_byte(0) + self._set_range_header(http_request, 0, end_byte) response = make_api_request( self.bytes_http or http, http_request) if response.status_code not in self._ACCEPTABLE_STATUSES: raise HttpError.FromResponse(response) self._initial_response = response - self._SetTotal(response.info) + self._set_total(response.info) url = response.info.get('content-location', response.request_url) if client is not None: url = client.FinalizeTransferUrl(url) - self._Initialize(http, url) + self._initialize(http, url) # Unless the user has requested otherwise, we want to just # go ahead and pump the bytes now. if self.auto_transfer: - self.StreamInChunks() + self.stream_file(use_chunks=True) - def _NormalizeStartEnd(self, start, end=None): + def _normalize_start_end(self, start, end=None): if end is not None: if start < 0: raise TransferInvalidError( @@ -284,7 +280,7 @@ def _NormalizeStartEnd(self, start, end=None): start = max(0, start + self.total_size) return start, self.total_size - 1 - def _SetRangeHeader(self, request, start, end=None): + def _set_range_header(self, request, start, end=None): if start < 0: request.headers['range'] = 'bytes=%d' % start elif end is None: @@ -292,7 +288,7 @@ def _SetRangeHeader(self, request, start, end=None): else: request.headers['range'] = 'bytes=%d-%d' % (start, end) - def _ComputeEndByte(self, start, end=None, use_chunks=True): + def _compute_end_byte(self, start, end=None, use_chunks=True): """Compute the last byte to fetch for this request. This is all based on the HTTP spec for Range and @@ -335,18 +331,16 @@ def _ComputeEndByte(self, start, end=None, use_chunks=True): return end_byte - def _GetChunk(self, start, end, additional_headers=None): + def _get_chunk(self, start, end): """Retrieve a chunk, and return the full response.""" - self.EnsureInitialized() + self._ensure_initialized() request = Request(url=self.url) - self._SetRangeHeader(request, start, end=end) - if additional_headers is not None: - request.headers.update(additional_headers) + self._set_range_header(request, start, end=end) return make_api_request( self.bytes_http, request, retry_func=self.retry_func, retries=self.num_retries) - def _ProcessResponse(self, response): + def _process_response(self, response): """Process response (by updating self and writing to self.stream).""" if response.status_code not in self._ACCEPTABLE_STATUSES: # We distinguish errors that mean we made a mistake in setting @@ -371,8 +365,7 @@ def _ProcessResponse(self, response): self.stream.write('') return response - def GetRange(self, start, end=None, additional_headers=None, - use_chunks=True): + def get_range(self, start, end=None, use_chunks=True): """Retrieve a given byte range from this download, inclusive. Range must be of one of these three forms: @@ -386,70 +379,57 @@ def GetRange(self, start, end=None, additional_headers=None, Args: start: (int) Where to start fetching bytes. (See above.) end: (int, optional) Where to stop fetching bytes. (See above.) 
- additional_headers: (bool, optional) Any additional headers to - pass with the request. use_chunks: (bool, default: True) If False, ignore self.chunksize and fetch this range in a single request. Returns: None. Streams bytes into self.stream. """ - self.EnsureInitialized() + self._ensure_initialized() progress_end_normalized = False if self.total_size is not None: - progress, end_byte = self._NormalizeStartEnd(start, end) + progress, end_byte = self._normalize_start_end(start, end) progress_end_normalized = True else: progress = start end_byte = end while (not progress_end_normalized or end_byte is None or progress <= end_byte): - end_byte = self._ComputeEndByte(progress, end=end_byte, + end_byte = self._compute_end_byte(progress, end=end_byte, use_chunks=use_chunks) - response = self._GetChunk(progress, end_byte, - additional_headers=additional_headers) + response = self._get_chunk(progress, end_byte) if not progress_end_normalized: - self._SetTotal(response.info) - progress, end_byte = self._NormalizeStartEnd(start, end) + self._set_total(response.info) + progress, end_byte = self._normalize_start_end(start, end) progress_end_normalized = True - response = self._ProcessResponse(response) + response = self._process_response(response) progress += response.length if response.length == 0: raise TransferRetryError( 'Zero bytes unexpectedly returned in download response') - def StreamInChunks(self, additional_headers=None): - """Stream the entire download in chunks.""" - # XXX: this function should just go away during cleanup - self.StreamMedia(additional_headers=additional_headers, - use_chunks=True) - - def StreamMedia(self, additional_headers=None, use_chunks=True): + def stream_file(self, use_chunks=True): """Stream the entire download. Args: - additional_headers: (default: None) Additional headers to - include in fetching bytes. use_chunks: (bool, default: True) If False, ignore self.chunksize and stream this download in a single request. Returns: None. Streams bytes into self.stream. 
""" - self.EnsureInitialized() + self._ensure_initialized() while True: if self._initial_response is not None: response = self._initial_response self._initial_response = None else: - end_byte = self._ComputeEndByte(self.progress, + end_byte = self._compute_end_byte(self.progress, use_chunks=use_chunks) - response = self._GetChunk( - self.progress, end_byte, - additional_headers=additional_headers) + response = self._get_chunk(self.progress, end_byte) if self.total_size is None: - self._SetTotal(response.info) - response = self._ProcessResponse(response) + self._set_total(response.info) + response = self._process_response(response) if (response.status_code == http_client.OK or self.progress >= self.total_size): break @@ -486,7 +466,7 @@ def __init__(self, stream, mime_type, total_size=None, http=None, self.__total_size = total_size @classmethod - def FromFile(cls, filename, mime_type=None, auto_transfer=True, **kwds): + def from_file(cls, filename, mime_type=None, auto_transfer=True, **kwds): """Create a new Upload object from a filename.""" path = os.path.expanduser(filename) if not os.path.exists(path): @@ -501,7 +481,7 @@ def FromFile(cls, filename, mime_type=None, auto_transfer=True, **kwds): close_stream=True, auto_transfer=auto_transfer, **kwds) @classmethod - def FromStream(cls, stream, mime_type, total_size=None, auto_transfer=True, + def from_stream(cls, stream, mime_type, total_size=None, auto_transfer=True, **kwds): """Create a new Upload object from a stream.""" if mime_type is None: @@ -540,7 +520,7 @@ def total_size(self): @total_size.setter def total_size(self, value): - self.EnsureUninitialized() + self._ensure_uninitialized() self.__total_size = value def __repr__(self): @@ -550,7 +530,7 @@ def __repr__(self): return 'Upload with %d/%s bytes transferred for url %s' % ( self.progress, self.total_size or '???', self.url) - def _SetDefaultUploadStrategy(self, upload_config, http_request): + def _set_default_strategy(self, upload_config, http_request): """Determine and set the default upload strategy for this upload. We generally prefer simple or multipart, unless we're forced to @@ -579,7 +559,7 @@ def _SetDefaultUploadStrategy(self, upload_config, http_request): strategy = RESUMABLE_UPLOAD self.strategy = strategy - def ConfigureRequest(self, upload_config, http_request, url_builder): + def configure_request(self, upload_config, http_request, url_builder): """Configure the request and url for this upload.""" # Validate total_size vs. 
max_size if (self.total_size and upload_config.max_size and @@ -593,27 +573,27 @@ def ConfigureRequest(self, upload_config, http_request, url_builder): 'MIME type %s does not match any accepted MIME ranges %s' % ( self.mime_type, upload_config.accept)) - self._SetDefaultUploadStrategy(upload_config, http_request) + self._set_default_strategy(upload_config, http_request) if self.strategy == SIMPLE_UPLOAD: url_builder.relative_path = upload_config.simple_path if http_request.body: url_builder.query_params['uploadType'] = 'multipart' - self._ConfigureMultipartRequest(http_request) + self._configure_multipart_request(http_request) else: url_builder.query_params['uploadType'] = 'media' - self._ConfigureMediaRequest(http_request) + self._configure_media_request(http_request) else: url_builder.relative_path = upload_config.resumable_path url_builder.query_params['uploadType'] = 'resumable' - self._ConfigureResumableRequest(http_request) + self._configure_resumable_request(http_request) - def _ConfigureMediaRequest(self, http_request): + def _configure_media_request(self, http_request): """Configure http_request as a simple request for this upload.""" http_request.headers['content-type'] = self.mime_type http_request.body = self.stream.read() http_request.loggable_body = '' - def _ConfigureMultipartRequest(self, http_request): + def _configure_multipart_request(self, http_request): """Configure http_request as a multipart request for this upload.""" # This is a multipart/related upload. msg_root = mime_multipart.MIMEMultipart('related') @@ -651,13 +631,13 @@ def _ConfigureMultipartRequest(self, http_request): body_components[-2] = '\n\n'.join([headers, '\n\n--']) http_request.loggable_body = multipart_boundary.join(body_components) - def _ConfigureResumableRequest(self, http_request): + def _configure_resumable_request(self, http_request): http_request.headers['X-Upload-Content-Type'] = self.mime_type if self.total_size is not None: http_request.headers[ 'X-Upload-Content-Length'] = str(self.total_size) - def RefreshResumableUploadState(self): + def refresh_upload_state(self): """Talk to the server and refresh the state of this resumable upload. Returns: @@ -665,7 +645,7 @@ def RefreshResumableUploadState(self): """ if self.strategy != RESUMABLE_UPLOAD: return - self.EnsureInitialized() + self._ensure_initialized() # XXX Per RFC 2616/7231, a 'PUT' request is absolutely inappropriate # here: # it is intended to be used to replace the entire resource, # not to # query for a status. 
@@ -681,7 +661,7 @@ def RefreshResumableUploadState(self): refresh_response = make_api_request( self.http, refresh_request, redirections=0, retries=self.num_retries) - range_header = self._GetRangeHeaderFromResponse(refresh_response) + range_header = self._get_range_header(refresh_response) if refresh_response.status_code in (http_client.OK, http_client.CREATED): self._complete = True @@ -695,12 +675,12 @@ def RefreshResumableUploadState(self): if range_header is None: self.__progress = 0 else: - self.__progress = self._GetLastByte(range_header) + 1 + self.__progress = self._last_byte(range_header) + 1 self.stream.seek(self.progress) else: raise HttpError.FromResponse(refresh_response) - def _GetRangeHeaderFromResponse(self, response): + def _get_range_header(self, response): # XXX Per RFC 2616/7233, 'Range' is a request header, not a response # header: # If the back-end is actually setting 'Range' on responses, # somebody should be spanked: it should be sending 'Content-Range' @@ -712,11 +692,11 @@ def _GetRangeHeaderFromResponse(self, response): # https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#chunking return response.info.get('Range', response.info.get('range')) - def InitializeUpload(self, http_request, http=None, client=None): + def initialize_upload(self, http_request, http=None, client=None): """Initialize this upload from the given http_request.""" if self.strategy is None: raise UserError( - 'No upload strategy set; did you call ConfigureRequest?') + 'No upload strategy set; did you call configure_request?') if http is None and client is None: raise UserError('Must provide client or http.') if self.strategy != RESUMABLE_UPLOAD: @@ -724,7 +704,7 @@ def InitializeUpload(self, http_request, http=None, client=None): http = http or client.http if client is not None: http_request.url = client.FinalizeTransferUrl(http_request.url) - self.EnsureUninitialized() + self._ensure_uninitialized() http_response = make_api_request(http, http_request, retries=self.num_retries) if http_response.status_code != http_client.OK: @@ -738,21 +718,21 @@ def InitializeUpload(self, http_request, http=None, client=None): url = http_response.info['location'] if client is not None: url = client.FinalizeTransferUrl(url) - self._Initialize(http, url) + self._initialize(http, url) # Unless the user has requested otherwise, we want to just # go ahead and pump the bytes now. if self.auto_transfer: - return self.StreamInChunks() + return self.stream_file(use_chunks=True) else: return http_response - def _GetLastByte(self, range_header): + def _last_byte(self, range_header): _, _, end = range_header.partition('-') # TODO(craigcitro): Validate start == 0? return int(end) - def _ValidateChunksize(self, chunksize=None): + def _validate_chunksize(self, chunksize=None): if self._server_chunk_granularity is None: return chunksize = chunksize or self.chunksize @@ -761,24 +741,27 @@ def _ValidateChunksize(self, chunksize=None): 'Server requires chunksize to be a multiple of %d', self._server_chunk_granularity) - def _StreamMedia(self, additional_headers=None, use_chunks=True): - """Helper function for StreamMedia / StreamInChunks.""" + def stream_file(self, use_chunks=True): + """Send this resumable upload + + If 'use_chunks' is False, send it in a single reques. Otherwise, + send it in chunks. + """ if self.strategy != RESUMABLE_UPLOAD: raise InvalidUserInputError( 'Cannot stream non-resumable upload') # final_response is set if we resumed an already-completed upload. 
response = self._final_response - send_func = self._SendChunk if use_chunks else self._SendMediaBody + send_func = self._send_chunk if use_chunks else self._send_media_body if use_chunks: - self._ValidateChunksize(self.chunksize) - self.EnsureInitialized() + self._validate_chunksize(self.chunksize) + self._ensure_initialized() while not self.complete: - response = send_func(self.stream.tell(), - additional_headers=additional_headers) + response = send_func(self.stream.tell()) if response.status_code in (http_client.OK, http_client.CREATED): self._complete = True break - self.__progress = self._GetLastByte(response.info['range']) + self.__progress = self._last_byte(response.info['range']) if self.progress + 1 != self.stream.tell(): # TODO(craigcitro): Add a better way to recover here. raise CommunicationError( @@ -795,24 +778,7 @@ def _StreamMedia(self, additional_headers=None, use_chunks=True): (int(end_pos) - int(current_pos))) return response - def StreamMedia(self, additional_headers=None): - """Send this resumable upload in a single request. - - Args: - additional_headers: Dict of headers to include with the upload - http_wrapper.Request. - - Returns: - http_wrapper.Response of final response. - """ - return self._StreamMedia(additional_headers=additional_headers, - use_chunks=False) - - def StreamInChunks(self, additional_headers=None): - """Send this (resumable) upload in chunks.""" - return self._StreamMedia(additional_headers=additional_headers) - - def _SendMediaRequest(self, request, end): + def _send_media_request(self, request, end): """Request helper function for SendMediaBody & SendChunk.""" response = make_api_request( self.bytes_http, request, retry_func=self.retry_func, @@ -821,18 +787,18 @@ def _SendMediaRequest(self, request, end): RESUME_INCOMPLETE): # We want to reset our state to wherever the server left us # before this failed request, and then raise. 
- self.RefreshResumableUploadState() + self.refresh_upload_state() raise HttpError.FromResponse(response) if response.status_code == RESUME_INCOMPLETE: - last_byte = self._GetLastByte( - self._GetRangeHeaderFromResponse(response)) + last_byte = self._last_byte( + self._get_range_header(response)) if last_byte + 1 != end: self.stream.seek(last_byte) return response - def _SendMediaBody(self, start, additional_headers=None): + def _send_media_body(self, start): """Send the entire media stream in a single request.""" - self.EnsureInitialized() + self._ensure_initialized() if self.total_size is None: raise TransferInvalidError( 'Total size must be known for SendMediaBody') @@ -849,14 +815,12 @@ def _SendMediaBody(self, start, additional_headers=None): self.total_size) request.headers['Content-Range'] = range_string - if additional_headers: - request.headers.update(additional_headers) - return self._SendMediaRequest(request, self.total_size) + return self._send_media_request(request, self.total_size) - def _SendChunk(self, start, additional_headers=None): + def _send_chunk(self, start): """Send the specified chunk.""" - self.EnsureInitialized() + self._ensure_initialized() no_log_body = self.total_size is None if self.total_size is None: # For the streaming resumable case, we need to detect when @@ -896,7 +860,5 @@ def _SendChunk(self, start, additional_headers=None): range_string = 'bytes %s-%s/%s' % (start, end - 1, self.total_size) request.headers['Content-Range'] = range_string - if additional_headers: - request.headers.update(additional_headers) - return self._SendMediaRequest(request, end) + return self._send_media_request(request, end) From 73c3282876e18a983297d69494f7ee581259a908 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 28 Oct 2015 20:51:12 -0400 Subject: [PATCH 11/17] Fix coverage gap from branch split. 
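Rework the upload tests to drive `_send_media_request` through the shared
`_MediaStreamer` test double instead of per-test closures, and re-point
`_MediaStreamer.__call__` at the `(request, end)` signature it now fakes.
The resulting pattern in each affected test looks roughly like this (a
sketch only; `_Stream`, `_MediaStreamer`, and the `upload` object are the
test-local doubles and fixtures defined in test_transfer.py):

    # Stand in for Upload._send_media_request and record its arguments.
    response = object()
    streamer = _MediaStreamer(response)
    upload._send_media_request = streamer

    # Exercise the code under test.
    found = upload._send_chunk(0)

    # The double returned the canned response and captured (request, end).
    self.assertTrue(found is response)
    request, end = streamer._called_with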
--- gcloud/streaming/test_transfer.py | 74 +++++++++---------------------- 1 file changed, 20 insertions(+), 54 deletions(-) diff --git a/gcloud/streaming/test_transfer.py b/gcloud/streaming/test_transfer.py index 8ab971e603b3..230224e75537 100644 --- a/gcloud/streaming/test_transfer.py +++ b/gcloud/streaming/test_transfer.py @@ -1683,20 +1683,14 @@ def test__send_media_body_start_lt_total_size(self): stream = _Stream() upload = self._makeOne(stream, total_size=SIZE) upload._initialize(http, self.UPLOAD_URL) - _called_with = [] response = object() - - def _send_media_request(request, end): - _called_with.append((request, end)) - return response - - upload._send_media_request = _send_media_request + streamer = _MediaStreamer(response) + upload._send_media_request = streamer found = upload._send_media_body(0) self.assertTrue(found is response) - self.assertEqual(len(_called_with), 1) - request, end = _called_with[0] + request, end = streamer._called_with self.assertEqual(request.url, self.UPLOAD_URL) self.assertEqual(request.http_method, 'PUT') body_stream = request.body @@ -1716,20 +1710,14 @@ def test__send_media_body_start_eq_total_size(self): stream = _Stream() upload = self._makeOne(stream, total_size=SIZE) upload._initialize(http, self.UPLOAD_URL) - _called_with = [] response = object() - - def _send_media_request(request, end): - _called_with.append((request, end)) - return response - - upload._send_media_request = _send_media_request + streamer = _MediaStreamer(response) + upload._send_media_request = streamer found = upload._send_media_body(SIZE) self.assertTrue(found is response) - self.assertEqual(len(_called_with), 1) - request, end = _called_with[0] + request, end = streamer._called_with self.assertEqual(request.url, self.UPLOAD_URL) self.assertEqual(request.http_method, 'PUT') body_stream = request.body @@ -1754,22 +1742,16 @@ def test__send_chunk_wo_total_size_stream_exhausted(self): http = object() upload = self._makeOne(_Stream(CONTENT), chunksize=1000) upload._initialize(http, self.UPLOAD_URL) - _called_with = [] response = object() - - def _send_media_request(request, end): - _called_with.append((request, end)) - return response - - upload._send_media_request = _send_media_request + streamer = _MediaStreamer(response) + upload._send_media_request = streamer self.assertEqual(upload.total_size, None) found = upload._send_chunk(0) self.assertTrue(found is response) self.assertEqual(upload.total_size, SIZE) - self.assertEqual(len(_called_with), 1) - request, end = _called_with[0] + request, end = streamer._called_with self.assertEqual(request.url, self.UPLOAD_URL) self.assertEqual(request.http_method, 'PUT') self.assertEqual(request.body, CONTENT) @@ -1786,22 +1768,16 @@ def test__send_chunk_wo_total_size_stream_not_exhausted(self): http = object() upload = self._makeOne(_Stream(CONTENT), chunksize=CHUNK_SIZE) upload._initialize(http, self.UPLOAD_URL) - _called_with = [] response = object() - - def _send_media_request(request, end): - _called_with.append((request, end)) - return response - - upload._send_media_request = _send_media_request + streamer = _MediaStreamer(response) + upload._send_media_request = streamer self.assertEqual(upload.total_size, None) found = upload._send_chunk(0) self.assertTrue(found is response) self.assertEqual(upload.total_size, None) - self.assertEqual(len(_called_with), 1) - request, end = _called_with[0] + request, end = streamer._called_with self.assertEqual(request.url, self.UPLOAD_URL) self.assertEqual(request.http_method, 'PUT') 
self.assertEqual(request.body, CONTENT[:CHUNK_SIZE]) @@ -1820,18 +1796,14 @@ def test__send_chunk_w_total_size_stream_not_exhausted(self): stream = _Stream(CONTENT) upload = self._makeOne(stream, total_size=SIZE, chunksize=CHUNK_SIZE) upload._initialize(http, self.UPLOAD_URL) - _called_with = [] response = object() + streamer = _MediaStreamer(response) + upload._send_media_request = streamer - def _send_media_request(request, end): - _called_with.append((request, end)) - return response - - upload._send_media_request = _send_media_request found = upload._send_chunk(0) self.assertTrue(found is response) - request, end = _called_with[0] + request, end = streamer._called_with self.assertEqual(request.url, self.UPLOAD_URL) self.assertEqual(request.http_method, 'PUT') body_stream = request.body @@ -1854,20 +1826,14 @@ def test__send_chunk_w_total_size_stream_exhausted(self): stream = _Stream(CONTENT) upload = self._makeOne(stream, total_size=SIZE, chunksize=CHUNK_SIZE) upload._initialize(http, self.UPLOAD_URL) - _called_with = [] response = object() - - def _send_media_request(request, end): - _called_with.append((request, end)) - return response - - upload._send_media_request = _send_media_request + streamer = _MediaStreamer(response) + upload._send_media_request = streamer found = upload._send_chunk(SIZE) self.assertTrue(found is response) - self.assertEqual(len(_called_with), 1) - request, end = _called_with[0] + request, end = streamer._called_with self.assertEqual(request.url, self.UPLOAD_URL) self.assertEqual(request.http_method, 'PUT') body_stream = request.body @@ -1972,9 +1938,9 @@ class _MediaStreamer(object): def __init__(self, response): self._response = response - def __call__(self, use_chunks=True): + def __call__(self, request, end): assert self._called_with is None - self._called_with = use_chunks + self._called_with = (request, end) return self._response From 7ad47bb0274a349ef9a9c28d19548c48902bd7c9 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 16 Nov 2015 19:54:59 -0500 Subject: [PATCH 12/17] Remove pointless lambda wrapping '_Request'. 
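The `_Request` test double already accepts `(url, http_method, body)`
positionally, so a lambda with the identical signature only forwarded its
arguments. Roughly, the change in each affected test (a sketch; `_Monkey`,
`MUT`, `_Request`, and `requester` are the test-local fixtures):

    # before: lambda that merely forwards to _Request
    with _Monkey(MUT,
                 Request=lambda url, http_method, body:
                     _Request(url, http_method, body),
                 make_api_request=requester):
        upload.stream_file()

    # after: patch with the class itself
    with _Monkey(MUT,
                 Request=_Request,
                 make_api_request=requester):
        upload.stream_file()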
Addresses: https://github.com/GoogleCloudPlatform/gcloud-python/pull/1209#issuecomment-157193581 --- gcloud/streaming/test_transfer.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/gcloud/streaming/test_transfer.py b/gcloud/streaming/test_transfer.py index 230224e75537..de09c8dc3e08 100644 --- a/gcloud/streaming/test_transfer.py +++ b/gcloud/streaming/test_transfer.py @@ -1402,8 +1402,7 @@ def test_initialize_upload_w_client_w_auto_transfer_w_OK(self): requester = _MakeRequest(response, chunk_response) with _Monkey(MUT, - Request=lambda url, http_method, body: - _Request(url, http_method, body), + Request=_Request, make_api_request=requester): upload.initialize_upload(request, client=client) @@ -1530,8 +1529,7 @@ def test_stream_file_incomplete(self): requester = _MakeRequest(response_1, response_2) with _Monkey(MUT, - Request=lambda url, http_method, body: - _Request(url, http_method, body), + Request=_Request, make_api_request=requester): response = upload.stream_file() @@ -1575,8 +1573,7 @@ def test_stream_file_incomplete_w_transfer_error(self): requester = _MakeRequest(response) with _Monkey(MUT, - Request=lambda url, http_method, body: - _Request(url, http_method, body), + Request=_Request, make_api_request=requester): with self.assertRaises(CommunicationError): upload.stream_file() From 8caed56adf0df27e6abde8d32c4cd710001c67f4 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 16 Nov 2015 19:58:48 -0500 Subject: [PATCH 13/17] Drop notes-to-self from module docstrings. Addresses: https://github.com/GoogleCloudPlatform/gcloud-python/pull/1209#discussion_r44995738 --- gcloud/streaming/buffered_stream.py | 4 ---- gcloud/streaming/exceptions.py | 27 +-------------------------- gcloud/streaming/http_wrapper.py | 14 -------------- gcloud/streaming/stream_slice.py | 7 +------ gcloud/streaming/transfer.py | 13 +------------ gcloud/streaming/util.py | 14 +------------- 6 files changed, 4 insertions(+), 75 deletions(-) diff --git a/gcloud/streaming/buffered_stream.py b/gcloud/streaming/buffered_stream.py index 6e89b6c0df22..0920ea0499e2 100644 --- a/gcloud/streaming/buffered_stream.py +++ b/gcloud/streaming/buffered_stream.py @@ -2,10 +2,6 @@ """Small helper class to provide a small slice of a stream. This class reads ahead to detect if we are at the end of the stream. - -:mod:`gcloud._apidools.transfer` uses: - -- :class:`BufferedStream` """ from gcloud.streaming import exceptions diff --git a/gcloud/streaming/exceptions.py b/gcloud/streaming/exceptions.py index 97a3adf8ebe0..7c3e46d7ecf5 100644 --- a/gcloud/streaming/exceptions.py +++ b/gcloud/streaming/exceptions.py @@ -1,30 +1,5 @@ # pylint: skip-file -"""Exceptions for generated client libraries. 
- -:mod:`gcloud.streaming.http_wrapper` uses: - -- :exc:`BadStatusCodeError` -- :exc:`RequestError` -- :exc:`RetryAfterError` - -:mod:`gcloud._apidools.transfer` uses: - -- :exc:`CommunicationError` -- :exc:`ConfigurationValueError` -- :exc:`HttpError` -- :exc:`InvalidDataError` -- :exc:`InvalidUserInputError` -- :exc:`NotFoundError` -- :exc:`TransferInvalidError` -- :exc:`TransferRetryError` -- :exc:`UserError` - -:mod:`gcloud._apidools.util` uses: - -- :exc:`ConfigurationValueError` -- :exc:`InvalidUserInputError` -- :exc:`TypecheckError` -""" +"""Exceptions for generated client libraries.""" class Error(Exception): diff --git a/gcloud/streaming/http_wrapper.py b/gcloud/streaming/http_wrapper.py index 981809eb08de..3b2c1a0888bf 100644 --- a/gcloud/streaming/http_wrapper.py +++ b/gcloud/streaming/http_wrapper.py @@ -3,20 +3,6 @@ This library wraps the underlying http library we use, which is currently httplib2. - - -:mod:`gcloud.storage.blob` uses: - -- :class:`Request` -- :function:`make_api_request` - -:mod:`gcloud._apidools.transfer` uses: - -- :function:`get_http` -- :function:`handle_http_exceptions` -- :function:`make_api_request` -- :class:`Request` -- :const:`RESUME_INCOMPLETE` """ import collections diff --git a/gcloud/streaming/stream_slice.py b/gcloud/streaming/stream_slice.py index eedfcc5a5203..df3023d2b4ce 100644 --- a/gcloud/streaming/stream_slice.py +++ b/gcloud/streaming/stream_slice.py @@ -1,10 +1,5 @@ # pylint: skip-file -"""Small helper class to provide a small slice of a stream. - -:mod:`gcloud._apidools.transfer` uses: - -- :class:`StreamSlice` -""" +"""Small helper class to provide a small slice of a stream.""" from gcloud.streaming import exceptions diff --git a/gcloud/streaming/transfer.py b/gcloud/streaming/transfer.py index e2f53330ce79..5f7293ac61ec 100644 --- a/gcloud/streaming/transfer.py +++ b/gcloud/streaming/transfer.py @@ -1,16 +1,5 @@ # pylint: skip-file -"""Upload and download support for apitools. - -:mod:`gcloud.storage.blob` uses: - -- :class:`Download` -- :class:`Upload` -- :const:`RESUMABLE_UPLOAD` - -:mod:`gcloud.storage.test_blob` patches: - -- :const:`RESUMABLE_UPLOAD_THRESHOLD` -""" +"""Upload and download support for apitools.""" import email.generator as email_generator import email.mime.multipart as mime_multipart diff --git a/gcloud/streaming/util.py b/gcloud/streaming/util.py index ecdd44170bea..5cdaeaebd9eb 100644 --- a/gcloud/streaming/util.py +++ b/gcloud/streaming/util.py @@ -1,17 +1,5 @@ # pylint: skip-file -"""Assorted utilities shared between parts of apitools. - -Pruned to include only helpers used by other vendored-in modules: - -:mod:`gcloud.streaming.http_wrapper` uses: - -- :function:`calculate_wait_for_retry` - -:mod:`gcloud._apidools.transfer` uses: - -- :function:`type_check` -- :function:`acceptable_mime_type` -""" +"""Assorted utilities shared between parts of apitools.""" import random From 94fc9bf7e0106cd2c79822d56270d126fbe9ce53 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 16 Nov 2015 20:04:45 -0500 Subject: [PATCH 14/17] Drop legacy '__all__'. 
Addresses: https://github.com/GoogleCloudPlatform/gcloud-python/pull/1209#discussion-diff-44996372 --- gcloud/streaming/http_wrapper.py | 10 ---------- gcloud/streaming/transfer.py | 7 ------- 2 files changed, 17 deletions(-) diff --git a/gcloud/streaming/http_wrapper.py b/gcloud/streaming/http_wrapper.py index 3b2c1a0888bf..c4208d892c28 100644 --- a/gcloud/streaming/http_wrapper.py +++ b/gcloud/streaming/http_wrapper.py @@ -21,16 +21,6 @@ from gcloud.streaming.exceptions import RetryAfterError from gcloud.streaming.util import calculate_wait_for_retry -__all__ = [ - 'get_http', - 'handle_http_exceptions', - 'make_api_request', - 'Request', - 'Response', - 'RESUME_INCOMPLETE', - 'TOO_MANY_REQUESTS', -] - # 308 and 429 don't have names in httplib. RESUME_INCOMPLETE = 308 diff --git a/gcloud/streaming/transfer.py b/gcloud/streaming/transfer.py index 5f7293ac61ec..cf733e8e3b5a 100644 --- a/gcloud/streaming/transfer.py +++ b/gcloud/streaming/transfer.py @@ -29,13 +29,6 @@ from gcloud.streaming.util import acceptable_mime_type from gcloud.streaming.util import type_check -__all__ = [ - 'Download', - 'Upload', - 'RESUMABLE_UPLOAD', - 'RESUMABLE_UPLOAD_THRESHOLD', - 'SIMPLE_UPLOAD', -] RESUMABLE_UPLOAD_THRESHOLD = 5 << 20 SIMPLE_UPLOAD = 'simple' From 42ded7d67354148767ac8520fc26a60a42fa6853 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 16 Nov 2015 20:25:04 -0500 Subject: [PATCH 15/17] Typo. Addresses: https://github.com/GoogleCloudPlatform/gcloud-python/pull/1209#discussion_r44998281 --- gcloud/streaming/transfer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gcloud/streaming/transfer.py b/gcloud/streaming/transfer.py index cf733e8e3b5a..2063d0fc2ace 100644 --- a/gcloud/streaming/transfer.py +++ b/gcloud/streaming/transfer.py @@ -726,7 +726,7 @@ def _validate_chunksize(self, chunksize=None): def stream_file(self, use_chunks=True): """Send this resumable upload - If 'use_chunks' is False, send it in a single reques. Otherwise, + If 'use_chunks' is False, send it in a single request. Otherwise, send it in chunks. """ if self.strategy != RESUMABLE_UPLOAD: From f66f1b82147f3d5651a637d8b97571f602081ac5 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 18 Nov 2015 15:15:44 -0500 Subject: [PATCH 16/17] Move quasi-docstring comments into docstring for '_ExceptionRetryArgs'. Addresses: https://github.com/GoogleCloudPlatform/gcloud-python/pull/1209#discussion_r44997161 --- gcloud/streaming/http_wrapper.py | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/gcloud/streaming/http_wrapper.py b/gcloud/streaming/http_wrapper.py index c4208d892c28..711290cae791 100644 --- a/gcloud/streaming/http_wrapper.py +++ b/gcloud/streaming/http_wrapper.py @@ -33,13 +33,24 @@ RESUME_INCOMPLETE, ) -# http: An httplib2.Http instance. -# http_request: A http_wrapper.Request. -# exc: Exception being raised. -# num_retries: Number of retries consumed; used for exponential backoff. -_ExceptionRetryArgs = collections.namedtuple( +class _ExceptionRetryArgs(collections.namedtuple( '_ExceptionRetryArgs', ['http', 'http_request', 'exc', 'num_retries', - 'max_retry_wait']) + 'max_retry_wait'])): + """Bundle of information for retriable exceptions. + + :type http: :class:`httplib2.Http` (or conforming alternative) + :param http: instance used to perform requests. + + :type http_request: :class:`Request` + :param http_request: the request whose response was a retriable error + + :type exc: :class:`Exception` subclass + :param exc: the exception being raised. 
+ + :type num_retries: integer + :param num_retries: Number of retries consumed; used for exponential + backoff. + """ @contextlib.contextmanager From 7e94700e993cec21787fe9176cbe4527a5f98d73 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 18 Nov 2015 15:20:11 -0500 Subject: [PATCH 17/17] Move nested '_process_content_range' helper out to module scope. Addresses: https://github.com/GoogleCloudPlatform/gcloud-python/pull/1209#discussion_r44997252 --- gcloud/streaming/http_wrapper.py | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/gcloud/streaming/http_wrapper.py b/gcloud/streaming/http_wrapper.py index 711290cae791..cb75babb1d70 100644 --- a/gcloud/streaming/http_wrapper.py +++ b/gcloud/streaming/http_wrapper.py @@ -139,6 +139,23 @@ def body(self, value): self.loggable_body = '' +def _process_content_range(content_range): + """Convert a 'Content-Range' header into a length for the response. + + Helper for :meth:`Response.length`. + + :type content_range: string + :param content_range: the header value being parsed. + + :rtype: integer + :returns: the length of the response chunk. + """ + _, _, range_spec = content_range.partition(' ') + byte_range, _, _ = range_spec.partition('/') + start, _, end = byte_range.partition('-') + return int(end) - int(start) + 1 + + # Note: currently the order of fields here is important, since we want # to be able to pass in the result from httplib2.request. class Response(collections.namedtuple( @@ -160,12 +177,6 @@ def length(self): Returns: Response length (as int or long) """ - def _process_content_range(content_range): - _, _, range_spec = content_range.partition(' ') - byte_range, _, _ = range_spec.partition('/') - start, _, end = byte_range.partition('-') - return int(end) - int(start) + 1 - if 'content-encoding' in self.info and 'content-range' in self.info: # httplib2 rewrites content-length in the case of a compressed # transfer; we can't trust the content-length header in that