diff --git a/sdk/tables/azure-data-tables/CHANGELOG.md b/sdk/tables/azure-data-tables/CHANGELOG.md
index 7e53b2e13c4a..0ab5c6b93f33 100644
--- a/sdk/tables/azure-data-tables/CHANGELOG.md
+++ b/sdk/tables/azure-data-tables/CHANGELOG.md
@@ -1,7 +1,16 @@
 # Release History

 ## 12.0.0b7 (Unreleased)
-* Fixed issue with Cosmos merge operations
+**Breaking**
+* Removed the explicit `LinearRetry` and `ExponentialRetry` classes in favor of keyword parameters.
+* Renamed the `filter` parameter in query APIs to `query_filter`.
+* The `location_mode` attribute on clients is now read-only. It has been added as a keyword parameter to the constructor.
+
+**Fixes**
+* Fixed issue with Cosmos merge operations.
+* Removed legacy Storage policies from the pipeline.
+* Removed unused legacy client-side encryption attributes from client classes.
+* Fixed sharing of the pipeline between service and table clients.

 ## 12.0.0b6 (2021-04-06)
 * Updated deserialization of datetime fields in entities to support preservation of the service format with additional decimal place.
diff --git a/sdk/tables/azure-data-tables/README.md b/sdk/tables/azure-data-tables/README.md
index 9a6f907c1f72..3de40fc36836 100644
--- a/sdk/tables/azure-data-tables/README.md
+++ b/sdk/tables/azure-data-tables/README.md
@@ -199,7 +199,7 @@ Querying entities in the table:
 from azure.data.tables import TableClient
 my_filter = "PartitionKey eq 'RedMarker'"
 table_client = TableClient.from_connection_string(conn_str="", table_name="mytable")
-entities = table_client.query_entities(filter=my_filter)
+entities = table_client.query_entities(my_filter)
 for entity in entities:
     for key in entity.keys():
         print("Key: {}, Value: {}".format(key, entity[key]))
diff --git a/sdk/tables/azure-data-tables/azure/data/tables/__init__.py b/sdk/tables/azure-data-tables/azure/data/tables/__init__.py
index 4b319dd17fcc..14ef89c6fa06 100644
--- a/sdk/tables/azure-data-tables/azure/data/tables/__init__.py
+++ b/sdk/tables/azure-data-tables/azure/data/tables/__init__.py
@@ -25,7 +25,6 @@
     BatchTransactionResult,
     BatchErrorException,
 )
-from ._policies import ExponentialRetry, LinearRetry
 from ._version import VERSION
 from ._deserialize import TableErrorCode
 from ._table_batch import TableBatchOperations
@@ -35,8 +34,6 @@
 __all__ = [
     "TableClient",
     "TableServiceClient",
-    "ExponentialRetry",
-    "LinearRetry",
     "LocationMode",
     "ResourceTypes",
     "AccountSasPermissions",
diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_authentication.py b/sdk/tables/azure-data-tables/azure/data/tables/_authentication.py
index f1c493e8be29..b0dfbd5d56e0 100644
--- a/sdk/tables/azure-data-tables/azure/data/tables/_authentication.py
+++ b/sdk/tables/azure-data-tables/azure/data/tables/_authentication.py
@@ -24,8 +24,6 @@
 except ImportError:
     pass

-from ._constants import DEV_ACCOUNT_NAME, DEV_ACCOUNT_SECONDARY_NAME
-
 from ._common_conversion import (
     _sign_string,
 )
@@ -87,13 +85,6 @@ def _get_canonicalized_resource(self, request):
                 return "/" + self.account_name + str(uri_path)
             except TypeError:
                 pass
-
-        # for emulator, use the DEV_ACCOUNT_NAME instead of DEV_ACCOUNT_SECONDARY_NAME
-        # as this is how the emulator works
-        if self.is_emulated and uri_path.find(DEV_ACCOUNT_SECONDARY_NAME) == 1:
-            # only replace the first instance
-            uri_path = uri_path.replace(DEV_ACCOUNT_SECONDARY_NAME, DEV_ACCOUNT_NAME, 1)
-
         return "/" + self.account_name + uri_path

     def _get_canonicalized_headers(self, request):
diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_base_client.py
b/sdk/tables/azure-data-tables/azure/data/tables/_base_client.py index de9a12415e25..d1fc6fd5a909 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_base_client.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_base_client.py @@ -4,23 +4,18 @@ # license information. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -import logging -from uuid import uuid4, UUID -from datetime import datetime -import six - +from typing import Dict, Optional, Any, List +from uuid import uuid4 try: - from urllib.parse import parse_qs, quote + from urllib.parse import parse_qs, quote, urlparse except ImportError: - from urlparse import parse_qs # type: ignore + from urlparse import parse_qs, urlparse # type: ignore from urllib2 import quote # type: ignore -from azure.core.configuration import Configuration +import six from azure.core.credentials import AzureSasCredential +from azure.core.utils import parse_connection_string from azure.core.exceptions import ClientAuthenticationError, ResourceNotFoundError -from azure.core.pipeline import Pipeline from azure.core.pipeline.transport import ( HttpTransport, HttpRequest, @@ -33,74 +28,78 @@ DistributedTracingPolicy, HttpLoggingPolicy, UserAgentPolicy, - AzureSasCredentialPolicy + AzureSasCredentialPolicy, + NetworkTraceLoggingPolicy, + CustomHookPolicy, + RequestIdPolicy ) -from ._common_conversion import _to_utc_datetime, _is_cosmos_endpoint +from ._generated import AzureTable +from ._common_conversion import _is_cosmos_endpoint from ._shared_access_signature import QueryStringConstants from ._constants import ( STORAGE_OAUTH_SCOPE, SERVICE_HOST_BASE, - CONNECTION_TIMEOUT, - READ_TIMEOUT, ) from ._models import LocationMode, BatchTransactionResult from ._authentication import SharedKeyCredentialPolicy from ._policies import ( CosmosPatchTransformPolicy, StorageHeadersPolicy, - StorageContentValidation, - StorageRequestHook, - StorageResponseHook, - StorageLoggingPolicy, StorageHosts, TablesRetryPolicy, ) from ._models import BatchErrorException from ._sdk_moniker import SDK_MONIKER -if TYPE_CHECKING: - from typing import ( # pylint: disable=ungrouped-imports - Union, - Optional, - Any, - Iterable, - Dict, - List, - Type, - Tuple, - ) - -_LOGGER = logging.getLogger(__name__) -_SERVICE_PARAMS = { - "blob": {"primary": "BlobEndpoint", "secondary": "BlobSecondaryEndpoint"}, - "queue": {"primary": "QueueEndpoint", "secondary": "QueueSecondaryEndpoint"}, - "file": {"primary": "FileEndpoint", "secondary": "FileSecondaryEndpoint"}, - "dfs": {"primary": "BlobEndpoint", "secondary": "BlobEndpoint"}, - "table": {"primary": "TableEndpoint", "secondary": "TableSecondaryEndpoint"}, -} - - -class StorageAccountHostsMixin(object): # pylint: disable=too-many-instance-attributes +_SUPPORTED_API_VERSIONS = ["2019-02-02", "2019-07-07"] + + +def get_api_version(kwargs, default): + # type: (Dict[str, Any], str) -> str + api_version = kwargs.pop("api_version", None) + if api_version and api_version not in _SUPPORTED_API_VERSIONS: + versions = "\n".join(_SUPPORTED_API_VERSIONS) + raise ValueError( + "Unsupported API version '{}'. Please select from:\n{}".format( + api_version, versions + ) + ) + return api_version or default + + +class AccountHostsMixin(object): # pylint: disable=too-many-instance-attributes def __init__( self, - parsed_url, # type: Any - service, # type: str + account_url, # type: Any credential=None, # type: Optional[Any] **kwargs # type: Any ): # type: (...) 
-> None - self._location_mode = kwargs.get("_location_mode", LocationMode.PRIMARY) + try: + if not account_url.lower().startswith("http"): + account_url = "https://" + account_url + except AttributeError: + raise ValueError("Account URL must be a string.") + self._cosmos_endpoint = _is_cosmos_endpoint(account_url) + parsed_url = urlparse(account_url.rstrip("/")) + if not parsed_url.netloc: + raise ValueError("Invalid URL: {}".format(account_url)) + + _, sas_token = parse_query(parsed_url.query) + if not sas_token and not credential: + raise ValueError( + "You need to provide either a SAS token or an account shared key to authenticate." + ) + self._query_str, credential = format_query_string(sas_token, credential) + self._location_mode = kwargs.get("location_mode", LocationMode.PRIMARY) self._hosts = kwargs.get("_hosts") self.scheme = parsed_url.scheme self._cosmos_endpoint = _is_cosmos_endpoint(parsed_url.hostname) - if service not in ["blob", "queue", "file-share", "dfs", "table"]: - raise ValueError("Invalid service: {}".format(service)) - service_name = service.split("-")[0] - account = parsed_url.netloc.split(".{}.core.".format(service_name)) + account = parsed_url.netloc.split(".table.core.") if "cosmos" in parsed_url.netloc: - account = parsed_url.netloc.split(".{}.cosmos.".format(service_name)) + account = parsed_url.netloc.split(".table.cosmos.") self.account_name = account[0] if len(account) > 1 else None secondary_hostname = None @@ -109,8 +108,8 @@ def __init__( raise ValueError("Token credential is only supported with HTTPS.") if hasattr(self.credential, "account_name"): self.account_name = self.credential.account_name - secondary_hostname = "{}-secondary.{}.{}".format( - self.credential.account_name, service_name, SERVICE_HOST_BASE + secondary_hostname = "{}-secondary.table.{}".format( + self.credential.account_name, SERVICE_HOST_BASE ) if not self._hosts: @@ -125,48 +124,12 @@ def __init__( LocationMode.PRIMARY: primary_hostname, LocationMode.SECONDARY: secondary_hostname, } - - self.require_encryption = kwargs.get("require_encryption", False) - self.key_encryption_key = kwargs.get("key_encryption_key") - self.key_resolver_function = kwargs.get("key_resolver_function") - + self._credential_policy = None self._configure_credential(self.credential) - kwargs.setdefault("connection_timeout", CONNECTION_TIMEOUT) - kwargs.setdefault("read_timeout", READ_TIMEOUT) - - self._policies = [ - StorageHeadersPolicy(**kwargs), - ProxyPolicy(**kwargs), - UserAgentPolicy(sdk_moniker=SDK_MONIKER, **kwargs), - StorageContentValidation(), - StorageRequestHook(**kwargs), - self._credential_policy, - ContentDecodePolicy(response_encoding="utf-8"), - RedirectPolicy(**kwargs), - StorageHosts(hosts=self._hosts, **kwargs), - kwargs.get("retry_policy") or TablesRetryPolicy(**kwargs), - StorageLoggingPolicy(**kwargs), - StorageResponseHook(**kwargs), - DistributedTracingPolicy(**kwargs), - HttpLoggingPolicy(**kwargs), - ] - + self._policies = self._configure_policies(hosts=self._hosts, **kwargs) if self._cosmos_endpoint: self._policies.insert(0, CosmosPatchTransformPolicy()) - def __enter__(self): - self._client.__enter__() - return self - - def __exit__(self, *args): - self._client.__exit__(*args) - - def close(self): - """This method is to close the sockets opened by the client. - It need not be used when using with a context manager. - """ - self._client.close() - @property def url(self): """The full endpoint URL to this entity, including SAS token if used. 
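> Aside (illustrative, not part of this diff): the reworked `AccountHostsMixin` above changes how callers configure clients — `location_mode` moves to a constructor keyword (the attribute itself becomes read-only, see the next hunk) and `api_version` is validated by `get_api_version` against `_SUPPORTED_API_VERSIONS`. A minimal sketch, assuming a placeholder account URL and SAS token:

```python
from azure.data.tables import TableServiceClient, LocationMode

# Hypothetical values; any real account URL and credential would do.
service = TableServiceClient(
    account_url="https://myaccount.table.core.windows.net",
    credential="<sas-token>",              # a SAS token string or shared key credential
    location_mode=LocationMode.SECONDARY,  # now a constructor keyword
    api_version="2019-02-02",              # values outside _SUPPORTED_API_VERSIONS raise ValueError
)
print(service.location_mode)  # "secondary"; the setter is removed, so assignment now fails
```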
@@ -228,14 +191,6 @@ def location_mode(self): return self._location_mode - @location_mode.setter - def location_mode(self, value): - if self._hosts.get(value): - self._location_mode = value - self._client._config.url = self.url # pylint: disable=protected-access - else: - raise ValueError("No host URL for location mode: {}".format(value)) - @property def api_version(self): """The version of the Storage API used for requests. @@ -244,27 +199,50 @@ def api_version(self): """ return self._client._config.version # pylint: disable=protected-access - def _format_query_string( - self, sas_token, credential, snapshot=None, share_snapshot=None + +class TablesBaseClient(AccountHostsMixin): + + def __init__( + self, + account_url, # type: str + credential=None, # type: str + **kwargs # type: Any ): - query_str = "?" - if snapshot: - query_str += "snapshot={}&".format(self.snapshot) - if share_snapshot: - query_str += "sharesnapshot={}&".format(self.snapshot) - if sas_token and isinstance(credential, AzureSasCredential): - raise ValueError( - "You cannot use AzureSasCredential when the resource URI also contains a Shared Access Signature.") - if sas_token and not credential: - query_str += sas_token - elif is_credential_sastoken(credential): - query_str += credential.lstrip("?") - credential = None - return query_str.rstrip("?&"), credential + # type: (...) -> None + super(TablesBaseClient, self).__init__(account_url, credential=credential, **kwargs) + self._client = AzureTable( + self.url, + policies=kwargs.pop('policies', self._policies), + **kwargs + ) + self._client._config.version = get_api_version(kwargs, self._client._config.version) # pylint: disable=protected-access + + def __enter__(self): + self._client.__enter__() + return self + + def __exit__(self, *args): + self._client.__exit__(*args) + + def _configure_policies(self, **kwargs): + return [ + RequestIdPolicy(**kwargs), + StorageHeadersPolicy(**kwargs), + UserAgentPolicy(sdk_moniker=SDK_MONIKER, **kwargs), + ProxyPolicy(**kwargs), + self._credential_policy, + ContentDecodePolicy(response_encoding="utf-8"), + RedirectPolicy(**kwargs), + StorageHosts(**kwargs), + TablesRetryPolicy(**kwargs), + CustomHookPolicy(**kwargs), + NetworkTraceLoggingPolicy(**kwargs), + DistributedTracingPolicy(**kwargs), + HttpLoggingPolicy(**kwargs), + ] def _configure_credential(self, credential): - # type: (Any, **Any) -> Tuple[Configuration, Pipeline] - self._credential_policy = None + # type: (Any) -> None if hasattr(credential, "get_token"): self._credential_policy = BearerTokenCredentialPolicy( credential, STORAGE_OAUTH_SCOPE @@ -343,33 +321,12 @@ def _batch_send( ) return transaction_result - def _parameter_filter_substitution( # pylint: disable=no-self-use - self, - parameters, # type: dict[str,str] - filter # type: str pylint: disable=redefined-builtin - ): - """Replace user defined parameter in filter - :param parameters: User defined parameters - :param filter: Filter for querying + def close(self): + # type: () -> None + """This method is to close the sockets opened by the client. + It need not be used when using with a context manager. 
""" - if parameters: - filter_strings = filter.split(' ') - for index, word in enumerate(filter_strings): - if word[0] == u'@': - val = parameters[word[1:]] - if val in [True, False]: - filter_strings[index] = str(val).lower() - elif isinstance(val, (float, six.integer_types)): - filter_strings[index] = str(val) - elif isinstance(val, datetime): - filter_strings[index] = "datetime'{}'".format(_to_utc_datetime(val)) - elif isinstance(val, UUID): - filter_strings[index] = "guid'{}'".format(str(val)) - else: - filter_strings[index] = "'{}'".format(val.replace("'", "''")) - return ' '.join(filter_strings) - - return filter + self._client.close() class TransportWrapper(HttpTransport): @@ -377,7 +334,6 @@ class TransportWrapper(HttpTransport): by a `get_client` method does not close the outer transport for the parent when used in a context manager. """ - def __init__(self, transport): self._transport = transport @@ -393,7 +349,7 @@ def close(self): def __enter__(self): pass - def __exit__(self, *args): + def __exit__(self, *args): # pylint: disable=arguments-differ pass @@ -413,49 +369,41 @@ def format_shared_key_credential(account, credential): return credential -def parse_connection_str(conn_str, credential, service, keyword_args): - conn_str = conn_str.rstrip(";") - conn_settings = [s.split("=", 1) for s in conn_str.split(";")] - if any(len(tup) != 2 for tup in conn_settings): - raise ValueError("Connection string is either blank or malformed.") - conn_settings = dict(conn_settings) - endpoints = _SERVICE_PARAMS[service] +def parse_connection_str(conn_str, credential, keyword_args): + conn_settings = parse_connection_string(conn_str) primary = None secondary = None if not credential: try: credential = { - "account_name": conn_settings["AccountName"], - "account_key": conn_settings["AccountKey"], + "account_name": conn_settings["accountname"], + "account_key": conn_settings["accountkey"], } except KeyError: - credential = conn_settings.get("SharedAccessSignature") - if endpoints["primary"] in conn_settings: - primary = conn_settings[endpoints["primary"]] - if endpoints["secondary"] in conn_settings: - secondary = conn_settings[endpoints["secondary"]] - else: - if endpoints["secondary"] in conn_settings: + credential = conn_settings.get("sharedaccesssignature") + + primary = conn_settings.get("tableendpoint") + secondary = conn_settings.get("tablesecondaryendpoint") + if not primary: + if secondary: raise ValueError("Connection string specifies only secondary endpoint.") try: - primary = "{}://{}.{}.{}".format( - conn_settings["DefaultEndpointsProtocol"], - conn_settings["AccountName"], - service, - conn_settings["EndpointSuffix"], + primary = "{}://{}.table.{}".format( + conn_settings["defaultendpointsprotocol"], + conn_settings["accountname"], + conn_settings["endpointsuffix"], ) - secondary = "{}-secondary.{}.{}".format( - conn_settings["AccountName"], service, conn_settings["EndpointSuffix"] + secondary = "{}-secondary.table.{}".format( + conn_settings["accountname"], conn_settings["endpointsuffix"] ) except KeyError: pass if not primary: try: - primary = "https://{}.{}.{}".format( - conn_settings["AccountName"], - service, - conn_settings.get("EndpointSuffix", SERVICE_HOST_BASE), + primary = "https://{}.table.{}".format( + conn_settings["accountname"], + conn_settings.get("endpointsuffix", SERVICE_HOST_BASE), ) except KeyError: raise ValueError("Connection string missing required connection details.") @@ -466,6 +414,19 @@ def parse_connection_str(conn_str, credential, service, 
keyword_args): return primary, credential +def format_query_string(sas_token, credential): + query_str = "?" + if sas_token and isinstance(credential, AzureSasCredential): + raise ValueError( + "You cannot use AzureSasCredential when the resource URI also contains a Shared Access Signature.") + if sas_token and not credential: + query_str += sas_token + elif is_credential_sastoken(credential): + query_str += credential.lstrip("?") + credential = None + return query_str.rstrip("?&"), credential + + def parse_query(query_str): sas_values = QueryStringConstants.to_list() parsed_query = {k: v[0] for k, v in parse_qs(query_str).items()} diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_constants.py b/sdk/tables/azure-data-tables/azure/data/tables/_constants.py index 979f7700d5d8..924b4c01bcf1 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_constants.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_constants.py @@ -3,55 +3,14 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- -import sys from ._generated._version import VERSION # default values for common package, in case it is used directly DEFAULT_X_MS_VERSION = "2018-03-28" +X_MS_VERSION = VERSION # Live ServiceClient URLs SERVICE_HOST_BASE = "core.windows.net" -DEFAULT_PROTOCOL = "https" - -# Development ServiceClient URLs -DEV_BLOB_HOST = "127.0.0.1:10000" -DEV_QUEUE_HOST = "127.0.0.1:10001" - -# Default credentials for Development Storage Service -DEV_ACCOUNT_NAME = "devstoreaccount1" -DEV_ACCOUNT_SECONDARY_NAME = "devstoreaccount1-secondary" -DEV_ACCOUNT_KEY = "Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==" - -# Socket timeout in seconds -DEFAULT_SOCKET_TIMEOUT = 20 - -# for python 3.5+, there was a change to the definition of the socket timeout (as far as socket.sendall is concerned) -# The socket timeout is now the maximum total duration to send all data. -if sys.version_info >= (3, 5): - # the timeout to connect is 20 seconds, and the read timeout is 2000 seconds - # the 2000 seconds was calculated with: 100MB (max block size)/ 50KB/s (an arbitrarily chosen minimum upload speed) - DEFAULT_SOCKET_TIMEOUT = (20, 2000) - -# Encryption constants -_ENCRYPTION_PROTOCOL_V1 = "1.0" - -_AUTHORIZATION_HEADER_NAME = "Authorization" -_COPY_SOURCE_HEADER_NAME = "x-ms-copy-source" -_REDACTED_VALUE = "REDACTED" - - -X_MS_VERSION = VERSION - -# Socket timeout in seconds -CONNECTION_TIMEOUT = 20 -READ_TIMEOUT = 20 - -# for python 3.5+, there was a change to the definition of the socket timeout (as far as socket.sendall is concerned) -# The socket timeout is now the maximum total duration to send all data. 
-if sys.version_info >= (3, 5):
-    # the timeout to connect is 20 seconds, and the read timeout is 2000 seconds
-    # the 2000 seconds was calculated with: 100MB (max block size)/ 50KB/s (an arbitrarily chosen minimum upload speed)
-    READ_TIMEOUT = 2000

 STORAGE_OAUTH_SCOPE = "https://storage.azure.com/.default"
diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_deserialize.py b/sdk/tables/azure-data-tables/azure/data/tables/_deserialize.py
index 64b8cd7d5553..d5eba3ed521b 100644
--- a/sdk/tables/azure-data-tables/azure/data/tables/_deserialize.py
+++ b/sdk/tables/azure-data-tables/azure/data/tables/_deserialize.py
@@ -280,11 +280,11 @@ def _return_headers_and_deserialized(
 def _return_context_and_deserialized(
     response, deserialized, response_headers
 ):  # pylint: disable=unused-argument
-    return response.http_response.location_mode, deserialized, response_headers
+    return response.context['location_mode'], deserialized, response_headers


 def _trim_service_metadata(metadata):
-    # type: (dict[str,str] -> None)
+    # type: (dict[str,str]) -> dict[str,str]
     return {
         "date": metadata.pop("date", None),
         "etag": metadata.pop("etag", None),
diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_error.py b/sdk/tables/azure-data-tables/azure/data/tables/_error.py
index 506f328c32b6..45dd0dae2e56 100644
--- a/sdk/tables/azure-data-tables/azure/data/tables/_error.py
+++ b/sdk/tables/azure-data-tables/azure/data/tables/_error.py
@@ -80,7 +80,7 @@ def _process_table_error(storage_error):
         storage_error.response
     )
     if isinstance(error_body, dict):
-        for info in error_body["odata.error"]:
+        for info in error_body.get("odata.error", {}):
             if info == "code":
                 error_code = error_body["odata.error"][info]
             elif info == "message":
diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_policies.py b/sdk/tables/azure-data-tables/azure/data/tables/_policies.py
index f7de7af5ecdf..2fc780e731a0 100644
--- a/sdk/tables/azure-data-tables/azure/data/tables/_policies.py
+++ b/sdk/tables/azure-data-tables/azure/data/tables/_policies.py
@@ -4,225 +4,59 @@
 # license information.
# -------------------------------------------------------------------------- -import base64 -import hashlib -import re -import random -from time import time -from io import SEEK_SET, UnsupportedOperation -import logging -import uuid -import types -from typing import Any, TYPE_CHECKING +import time +from typing import Any, TYPE_CHECKING, Dict from wsgiref.handlers import format_date_time - try: - from urllib.parse import ( - urlparse, - parse_qsl, - urlunparse, - urlencode, - ) + from urllib.parse import urlparse except ImportError: - from urllib import urlencode # type: ignore - from urlparse import ( # type: ignore - urlparse, - parse_qsl, - urlunparse, - ) + from urlparse import urlparse # type: ignore from azure.core.pipeline.policies import ( HeadersPolicy, SansIOHTTPPolicy, - NetworkTraceLoggingPolicy, - HTTPPolicy, - RequestHistory, RetryPolicy, ) -from azure.core.exceptions import AzureError, ServiceRequestError, ServiceResponseError +from azure.core.exceptions import AzureError, ServiceRequestError, ClientAuthenticationError from ._common_conversion import _transform_patch_to_cosmos_post from ._models import LocationMode -try: - _unicode_type = unicode # type: ignore -except NameError: - _unicode_type = str - if TYPE_CHECKING: - from azure.core.pipeline import PipelineRequest, PipelineResponse - -_LOGGER = logging.getLogger(__name__) - - -def encode_base64(data): - if isinstance(data, _unicode_type): - data = data.encode("utf-8") - encoded = base64.b64encode(data) - return encoded.decode("utf-8") - - -def is_exhausted(settings): - """Are we out of retries?""" - retry_counts = ( - settings["total"], - settings["connect"], - settings["read"], - settings["status"], - ) - retry_counts = list(filter(None, retry_counts)) - if not retry_counts: - return False - return min(retry_counts) < 0 - - -def retry_hook(settings, **kwargs): - if settings["hook"]: - settings["hook"]( - retry_count=settings["count"] - 1, location_mode=settings["mode"], **kwargs - ) + from azure.core.pipeline import PipelineRequest -def is_retry(response, mode): - """Is this method/status code retryable? (Based on whitelists and control - variables such as the number of total retries to allow, whether to - respect the Retry-After header, whether this header is present, and - whether the returned status code is on the list of status codes to - be retried upon on the presence of the aforementioned header) - """ - status = response.http_response.status_code - if 300 <= status < 500: - # An exception occured, but in most cases it was expected. Examples could - # include a 309 Conflict or 412 Precondition Failed. - if status == 404 and mode == LocationMode.SECONDARY: - # Response code 404 should be retried if secondary was used. - return True - if status == 408: - # Response code 408 is a timeout and should be retried. - return True - return False - if status >= 500: - # Response codes above 500 with the exception of 501 Not Implemented and - # 505 Version Not Supported indicate a server issue and should be retried. - if status in [501, 505]: - return False - return True - return False - def set_next_host_location(settings, request): + # type: (Dict[str, Any], PipelineRequest) -> None """ A function which sets the next host location on the request, if applicable. - - :param ~azure.storage.models.RetryContext context: - The retry context containing the previous host location and the request - to evaluate and possibly modify. 
- """ - if settings["hosts"] and all(settings["hosts"].values()): - url = urlparse(request.url) - # If there's more than one possible location, retry to the alternative - if settings["mode"] == LocationMode.PRIMARY: - settings["mode"] = LocationMode.SECONDARY - else: - settings["mode"] = LocationMode.PRIMARY - updated = url._replace(netloc=settings["hosts"].get(settings["mode"])) - request.url = updated.geturl() - -def increment(settings, request, response=None, error=None): - """Increment the retry counters. - - :param Any request: - :param dict settings: - :param Any response: A pipeline response object. - :param Any error: An error encountered during the request, or - None if the response was received successfully. - :keyword callable cls: A custom type or function that will be passed the direct response - :return: Whether the retry attempts are exhausted. - :rtype: None """ - settings["total"] -= 1 - - if error and isinstance(error, ServiceRequestError): - # Errors when we're fairly sure that the server did not receive the - # request, so it should be safe to retry. - settings["connect"] -= 1 - settings["history"].append(RequestHistory(request, error=error)) - - elif error and isinstance(error, ServiceResponseError): - # Errors that occur after the request has been started, so we should - # assume that the server began processing it. - settings["read"] -= 1 - settings["history"].append(RequestHistory(request, error=error)) - - else: - # Incrementing because of a server error like a 500 in - # status_forcelist and a the given method is in the whitelist - if response: - settings["status"] -= 1 - settings["history"].append( - RequestHistory(request, http_response=response) - ) - - if not is_exhausted(settings): - if request.method not in ["PUT"] and settings["retry_secondary"]: - set_next_host_location(settings, request) - - # rewind the request body if it is a stream - if request.body and hasattr(request.body, "read"): - # no position was saved, then retry would not work - if settings["body_position"] is None: - return False - try: - # attempt to rewind the body to the initial position - request.body.seek(settings["body_position"], SEEK_SET) - except (UnsupportedOperation, ValueError): - # if body is not seekable, then retry would not work - return False - settings["count"] += 1 - return True - return False - - -def urljoin(base_url, stub_url): - parsed = urlparse(base_url) - parsed = parsed._replace(path=parsed.path + "/" + stub_url) - return parsed.geturl() + if request.http_request.method not in ['GET', 'HEAD']: + return + try: + if settings["retry_secondary"] and settings["hosts"] and all(settings["hosts"].values()): + url = urlparse(request.http_request.url) + # If there's more than one possible location, retry to the alternative + if settings["mode"] == LocationMode.PRIMARY: + settings["mode"] = LocationMode.SECONDARY + else: + settings["mode"] = LocationMode.PRIMARY + updated = url._replace(netloc=settings["hosts"].get(settings["mode"])) + request.http_request.url = updated.geturl() + except KeyError: + pass class StorageHeadersPolicy(HeadersPolicy): - request_id_header_name = "x-ms-client-request-id" def on_request(self, request): # type: (PipelineRequest, Any) -> None super(StorageHeadersPolicy, self).on_request(request) - current_time = format_date_time(time()) + + # Add required date headers + current_time = format_date_time(time.time()) request.http_request.headers["x-ms-date"] = current_time request.http_request.headers["Date"] = current_time - custom_id = 
request.context.options.pop("client_request_id", None) - request.http_request.headers["x-ms-client-request-id"] = custom_id or str( - uuid.uuid1() - ) - - def on_response(self, request, response): - # raise exception if the echoed client request id from the service is not identical to the one we sent - if self.request_id_header_name in response.http_response.headers: - - client_request_id = request.http_request.headers.get( - self.request_id_header_name - ) - - if ( - response.http_response.headers[self.request_id_header_name] - != client_request_id - ): - raise AzureError( - "Echoed client request ID: {} does not match sent client request ID: {}. " - "Service request ID: {}".format( - response.http_response.headers[self.request_id_header_name], - client_request_id, - response.http_response.headers["x-ms-request-id"], - ), - response=response.http_response, - ) class StorageHosts(SansIOHTTPPolicy): @@ -231,7 +65,7 @@ def __init__(self, hosts=None, **kwargs): # pylint: disable=unused-argument super(StorageHosts, self).__init__() def on_request(self, request): - # type: (PipelineRequest, Any) -> None + # type: (PipelineRequest) -> None request.context.options["hosts"] = self.hosts parsed_url = urlparse(request.http_request.url) @@ -259,498 +93,143 @@ def on_request(self, request): request.context.options["location_mode"] = location_mode -class StorageLoggingPolicy(NetworkTraceLoggingPolicy): - """A policy that logs HTTP request and response to the DEBUG logger. - - This accepts both global configuration, and per-request level with "enable_http_logger" - """ +class TablesRetryPolicy(RetryPolicy): + """A retry policy. - def on_request(self, request): - # type: (PipelineRequest, Any) -> None - http_request = request.http_request - options = request.context.options - if options.pop("logging_enable", self.enable_http_logger): - request.context["logging_enable"] = True - if not _LOGGER.isEnabledFor(logging.DEBUG): - return + The retry policy in the pipeline can be configured directly, or tweaked on a per-call basis. - try: - log_url = http_request.url - query_params = http_request.query - if "sig" in query_params: - log_url = log_url.replace(query_params["sig"], "sig=*****") - _LOGGER.debug("Request URL: %r", log_url) - _LOGGER.debug("Request method: %r", http_request.method) - _LOGGER.debug("Request headers:") - for header, value in http_request.headers.items(): - if header.lower() == "authorization": - value = "*****" - elif header.lower() == "x-ms-copy-source" and "sig" in value: - # take the url apart and scrub away the signed signature - scheme, netloc, path, params, query, fragment = urlparse(value) - parsed_qs = dict(parse_qsl(query)) - parsed_qs["sig"] = "*****" - - # the SAS needs to be put back together - value = urlunparse( - ( - scheme, - netloc, - path, - params, - urlencode(parsed_qs), - fragment, - ) - ) - - _LOGGER.debug(" %r: %r", header, value) - _LOGGER.debug("Request body:") - - # We don't want to log the binary data of a file upload. 
- if isinstance(http_request.body, types.GeneratorType): - _LOGGER.debug("File upload") - else: - _LOGGER.debug(str(http_request.body)) - except Exception as err: # pylint: disable=broad-except - _LOGGER.debug("Failed to log request: %r", err) - - def on_response(self, request, response): - # type: (PipelineRequest, PipelineResponse, Any) -> None - if response.context.pop("logging_enable", self.enable_http_logger): - if not _LOGGER.isEnabledFor(logging.DEBUG): - return + :keyword bool retry_to_secondary: Whether to allow retrying to the secondary fail-over host + location. Default value is False. - try: - _LOGGER.debug("Response status: %r", response.http_response.status_code) - _LOGGER.debug("Response headers:") - for res_header, value in response.http_response.headers.items(): - _LOGGER.debug(" %r: %r", res_header, value) - - # We don't want to log binary data if the response is a file. - _LOGGER.debug("Response content:") - pattern = re.compile(r'attachment; ?filename=["\w.]+', re.IGNORECASE) - header = response.http_response.headers.get("content-disposition") - - if header and pattern.match(header): - filename = header.partition("=")[2] - _LOGGER.debug("File attachments: %s", filename) - elif response.http_response.headers.get("content-type", "").endswith( - "octet-stream" - ): - _LOGGER.debug("Body contains binary data.") - elif response.http_response.headers.get("content-type", "").startswith( - "image" - ): - _LOGGER.debug("Body contains image data.") - else: - if response.context.options.get("stream", False): - _LOGGER.debug("Body is streamable") - else: - _LOGGER.debug(response.http_response.text()) - except Exception as err: # pylint: disable=broad-except - _LOGGER.debug("Failed to log response: %s", repr(err)) - - -class StorageRequestHook(SansIOHTTPPolicy): - def __init__(self, **kwargs): - self._request_callback = kwargs.get("raw_request_hook") - super(StorageRequestHook, self).__init__() + :keyword int retry_total: Total number of retries to allow. Takes precedence over other counts. + Default value is 10. - def on_request(self, request): - # type: (PipelineRequest, **Any) -> PipelineResponse - request_callback = request.context.options.pop( - "raw_request_hook", self._request_callback - ) - if request_callback: - request_callback(request) + :keyword int retry_connect: How many connection-related errors to retry on. + These are errors raised before the request is sent to the remote server, + which we assume has not triggered the server to process the request. Default value is 3. + :keyword int retry_read: How many times to retry on read errors. + These errors are raised after the request was sent to the server, so the + request may have side-effects. Default value is 3. -class StorageResponseHook(HTTPPolicy): - def __init__(self, **kwargs): - self._response_callback = kwargs.get("raw_response_hook") - super(StorageResponseHook, self).__init__() + :keyword int retry_status: How many times to retry on bad status codes. Default value is 3. 
-    def send(self, request):
-        # type: (PipelineRequest) -> PipelineResponse
-        data_stream_total = request.context.get(
-            "data_stream_total"
-        ) or request.context.options.pop("data_stream_total", None)
-        download_stream_current = request.context.get(
-            "download_stream_current"
-        ) or request.context.options.pop("download_stream_current", None)
-        upload_stream_current = request.context.get(
-            "upload_stream_current"
-        ) or request.context.options.pop("upload_stream_current", None)
-        response_callback = request.context.get(
-            "response_callback"
-        ) or request.context.options.pop("raw_response_hook", self._response_callback)
-
-        response = self.next.send(request)
-        will_retry = is_retry(response, request.context.options.get("mode"))
-        if not will_retry and download_stream_current is not None:
-            download_stream_current += int(
-                response.http_response.headers.get("Content-Length", 0)
-            )
-            if data_stream_total is None:
-                content_range = response.http_response.headers.get("Content-Range")
-                if content_range:
-                    data_stream_total = int(
-                        content_range.split(" ", 1)[1].split("/", 1)[1]
-                    )
-                else:
-                    data_stream_total = download_stream_current
-        elif not will_retry and upload_stream_current is not None:
-            upload_stream_current += int(
-                response.http_request.headers.get("Content-Length", 0)
-            )
-        for pipeline_obj in [request, response]:
-            pipeline_obj.context["data_stream_total"] = data_stream_total
-            pipeline_obj.context["download_stream_current"] = download_stream_current
-            pipeline_obj.context["upload_stream_current"] = upload_stream_current
-        if response_callback:
-            response_callback(response)
-            request.context["response_callback"] = response_callback
-        return response
+    :keyword float retry_backoff_factor: A backoff factor to apply between attempts after the second try
+        (most errors are resolved immediately by a second try without a delay).
+        In fixed mode, retry policy will always sleep for {backoff factor}.
+        In 'exponential' mode, retry policy will sleep for: `{backoff factor} * (2 ** ({number of total retries} - 1))`
+        seconds. If the backoff_factor is 0.1, then the retry will sleep
+        for [0.0s, 0.2s, 0.4s, ...] between retries. The default value is 0.8.
+    :keyword int retry_backoff_max: The maximum back off time. Default value is 120 seconds (2 minutes).
-class StorageContentValidation(SansIOHTTPPolicy):
-    """A simple policy that sends the given headers
-    with the request.
+    :keyword RetryMode retry_mode: Fixed or exponential delay between attempts, default is exponential.
+    :keyword int timeout: Timeout setting for the operation in seconds, default is 604800s (7 days).
""" - header_name = "Content-MD5" - - def __init__(self, **kwargs): # pylint: disable=unused-argument - super(StorageContentValidation, self).__init__() - - @staticmethod - def get_content_md5(data): - md5 = hashlib.md5() # nosec - if isinstance(data, bytes): - md5.update(data) - elif hasattr(data, "read"): - pos = 0 - try: - pos = data.tell() - except: # pylint: disable=bare-except - pass - for chunk in iter(lambda: data.read(4096), b""): - md5.update(chunk) - try: - data.seek(pos, SEEK_SET) - except (AttributeError, IOError): - raise ValueError("Data should be bytes or a seekable file-like object.") - else: - raise ValueError("Data should be bytes or a seekable file-like object.") - - return md5.digest() + def __init__(self, **kwargs): + super(TablesRetryPolicy, self).__init__(**kwargs) + self.retry_to_secondary = kwargs.get('retry_to_secondary', False) + + def is_retry(self, settings, response): + """Is this method/status code retryable? (Based on whitelists and control + variables such as the number of total retries to allow, whether to + respect the Retry-After header, whether this header is present, and + whether the returned status code is on the list of status codes to + be retried upon on the presence of the aforementioned header) + """ + should_retry = super(TablesRetryPolicy, self).is_retry(settings, response) + status = response.http_response.status_code + if status == 404 and settings['mode'] == LocationMode.SECONDARY: + # Response code 404 should be retried if secondary was used. + return True + return should_retry - def on_request(self, request): - # type: (PipelineRequest, Any) -> None - validate_content = request.context.options.pop("validate_content", False) - if validate_content and request.http_request.method != "GET": - computed_md5 = encode_base64( - StorageContentValidation.get_content_md5(request.http_request.data) - ) - request.http_request.headers[self.header_name] = computed_md5 - request.context["validate_content_md5"] = computed_md5 - request.context["validate_content"] = validate_content - - def on_response(self, request, response): - if response.context.get( - "validate_content", False - ) and response.http_response.headers.get("content-md5"): - computed_md5 = request.context.get("validate_content_md5") or encode_base64( - StorageContentValidation.get_content_md5(response.http_response.body()) - ) - if response.http_response.headers["content-md5"] != computed_md5: - raise AzureError( - "MD5 mismatch. Expected value is '{0}', computed value is '{1}'.".format( - response.http_response.headers["content-md5"], computed_md5 - ), - response=response.http_response, - ) + def configure_retries(self, options): + """Configures the retry settings. + :param options: keyword arguments from context. + :return: A dict containing settings and history for retries. + :rtype: dict + """ + config = super(TablesRetryPolicy, self).configure_retries(options) + config["retry_secondary"] = options.pop("retry_to_secondary", self.retry_to_secondary) + config["mode"] = options.pop("location_mode", LocationMode.PRIMARY) + config["hosts"] = options.pop("hosts", None) + return config -class TablesRetryPolicy(RetryPolicy): - """ - A base class for retry policies for the Table Client and Table Service Client - """ + def update_context(self, context, retry_settings): + """Updates retry history in pipeline context. 
- def __init__( - self, - initial_backoff=15, # type: int - increment_base=3, # type: int - retry_total=10, # type: int - retry_to_secondary=False, # type: bool - random_jitter_range=3, # type: int - **kwargs # type: Any - ): - """ - Build a TablesRetryPolicy object. - - :param int initial_backoff: - The initial backoff interval, in seconds, for the first retry. - :param int increment_base: - The base, in seconds, to increment the initial_backoff by after the - first retry. - :param int retry_total: total number of retries - :param bool retry_to_secondary: - Whether the request should be retried to secondary, if able. This should - only be enabled of RA-GRS accounts are used and potentially stale data - can be handled. - :param int random_jitter_range: - A number in seconds which indicates a range to jitter/randomize for the back-off interval. - For example, a random_jitter_range of 3 results in the back-off interval x to vary between x+3 and x-3. + :param context: The pipeline context. + :type context: ~azure.core.pipeline.PipelineContext + :param retry_settings: The retry settings. + :type retry_settings: dict """ - self.initial_backoff = initial_backoff - self.increment_base = increment_base - self.random_jitter_range = random_jitter_range - self.total_retries = retry_total - self.connect_retries = kwargs.pop("retry_connect", 3) - self.read_retries = kwargs.pop("retry_read", 3) - self.status_retries = kwargs.pop("retry_status", 3) - self.retry_to_secondary = retry_to_secondary - super(TablesRetryPolicy, self).__init__(**kwargs) + super(TablesRetryPolicy, self).update_context(context, retry_settings) + context['location_mode'] = retry_settings['mode'] - def get_backoff_time(self, settings): - """ - Calculates how long to sleep before retrying. - :param dict settings: - :keyword callable cls: A custom type or function that will be passed the direct response - :return: - An integer indicating how long to wait before retrying the request, - or None to indicate no retry should be performed. - :rtype: int or None - """ - random_generator = random.Random() - backoff = self.initial_backoff + ( - 0 if settings["count"] == 0 else pow(self.increment_base, settings["count"]) - ) - random_range_start = ( - backoff - self.random_jitter_range - if backoff > self.random_jitter_range - else 0 - ) - random_range_end = backoff + self.random_jitter_range - return random_generator.uniform(random_range_start, random_range_end) - - def configure_retries( - self, request - ): # pylint: disable=no-self-use, arguments-differ - # type: (...) 
-> Dict[Any, Any] - """ - :param Any request: - :param kwargs: - :return: - :rtype:dict - """ - body_position = None - if hasattr(request.http_request.body, "read"): - try: - body_position = request.http_request.body.tell() - except (AttributeError, UnsupportedOperation): - # if body position cannot be obtained, then retries will not work - pass - options = request.context.options - return { - "total": options.pop("retry_total", self.total_retries), - "connect": options.pop("retry_connect", self.connect_retries), - "read": options.pop("retry_read", self.read_retries), - "status": options.pop("retry_status", self.status_retries), - "retry_secondary": options.pop( - "retry_to_secondary", self.retry_to_secondary - ), - "mode": options.pop("location_mode", LocationMode.PRIMARY), - "hosts": options.pop("hosts", None), - "hook": options.pop("retry_hook", None), - "body_position": body_position, - "count": 0, - "history": [], - } - - def sleep(self, settings, transport): # pylint: disable=arguments-differ - # type: (...) -> None - """ - :param Any settings: - :param Any transport: - :return:None + def update_request(self, request, retry_settings): # pylint:disable=no-self-use + """Updates the pipeline request before attempting to retry. + + :param PipelineRequest request: The outgoing request. + :param dict(str, Any) retry_settings: The current retry context settings. """ - backoff = self.get_backoff_time( - settings, - ) - if not backoff or backoff < 0: - return - transport.sleep(backoff) + set_next_host_location(retry_settings, request) def send(self, request): + """Sends the PipelineRequest object to the next policy. Uses retry settings if necessary. + + :param request: The PipelineRequest object + :type request: ~azure.core.pipeline.PipelineRequest + :return: Returns the PipelineResponse or raises error if maximum retries exceeded. + :rtype: ~azure.core.pipeline.PipelineResponse + :raises: ~azure.core.exceptions.AzureError if maximum retries exceeded. 
+    :raises: ~azure.core.exceptions.ClientAuthenticationError if authentication fails.
         """
-        :param Any request:
-        :return: None
-        """
-        retries_remaining = True
+        retry_active = True
         response = None
-        retry_settings = self.configure_retries(request)
-        while retries_remaining:
+        retry_settings = self.configure_retries(request.context.options)
+        absolute_timeout = retry_settings['timeout']
+        is_response_error = True
+
+        while retry_active:
             try:
+                start_time = time.time()
+                self._configure_timeout(request, absolute_timeout, is_response_error)
                 response = self.next.send(request)
-                if is_retry(response, retry_settings["mode"]):
-                    retries_remaining = increment(
-                        retry_settings,
-                        request=request.http_request,
-                        response=response.http_response,
-                    )
-                    if retries_remaining:
-                        retry_hook(
-                            retry_settings,
-                            request=request.http_request,
-                            response=response.http_response,
-                            error=None,
-                        )
-                        self.sleep(retry_settings, request.context.transport)
+                if self.is_retry(retry_settings, response):
+                    retry_active = self.increment(retry_settings, response=response)
+                    if retry_active:
+                        self.update_request(request, retry_settings)
+                        self.sleep(retry_settings, request.context.transport, response=response)
+                        is_response_error = True
                         continue
                 break
+            except ClientAuthenticationError:  # pylint:disable=try-except-raise
+                # the authentication policy failed such that the client's request can't
+                # succeed--we'll never have a response to it, so propagate the exception
+                raise
             except AzureError as err:
-                retries_remaining = increment(
-                    retry_settings, request=request.http_request, error=err
-                )
-                if retries_remaining:
-                    retry_hook(
-                        retry_settings,
-                        request=request.http_request,
-                        response=None,
-                        error=err,
-                    )
-                    self.sleep(retry_settings, request.context.transport)
-                    continue
+                if self._is_method_retryable(retry_settings, request.http_request):
+                    retry_active = self.increment(retry_settings, response=request, error=err)
+                    if retry_active:
+                        self.update_request(request, retry_settings)
+                        self.sleep(retry_settings, request.context.transport)
+                        if isinstance(err, ServiceRequestError):
+                            is_response_error = False
+                        else:
+                            is_response_error = True
+                        continue
                 raise err
-            if retry_settings["history"]:
-                response.context["history"] = retry_settings["history"]
-            response.http_response.location_mode = retry_settings["mode"]
-            return response
-
-
-class ExponentialRetry(TablesRetryPolicy):
-    """Exponential retry."""
+            finally:
+                end_time = time.time()
+                if absolute_timeout:
+                    absolute_timeout -= (end_time - start_time)

-    def __init__(
-        self,
-        initial_backoff=15,
-        increment_base=3,
-        retry_total=3,
-        retry_to_secondary=False,
-        random_jitter_range=3,
-        **kwargs
-    ):
-        """
-        Constructs an Exponential retry object. The initial_backoff is used for
-        the first retry. Subsequent retries are retried after initial_backoff +
-        increment_power^retry_count seconds. For example, by default the first retry
-        occurs after 15 seconds, the second after (15+3^1) = 18 seconds, and the
-        third after (15+3^2) = 24 seconds.
-
-        :param int initial_backoff:
-            The initial backoff interval, in seconds, for the first retry.
-        :param int increment_base:
-            The base, in seconds, to increment the initial_backoff by after the
-            first retry.
-        :param int max_attempts:
-            The maximum number of retry attempts.
-        :param int retry_total: total number of retries
-        :param bool retry_to_secondary:
-            Whether the request should be retried to secondary, if able. This should
-            only be enabled of RA-GRS accounts are used and potentially stale data
-            can be handled.
- :param int random_jitter_range: - A number in seconds which indicates a range to jitter/randomize for the back-off interval. - For example, a random_jitter_range of 3 results in the back-off interval x to vary between x+3 and x-3. - """ - self.initial_backoff = initial_backoff - self.increment_base = increment_base - self.random_jitter_range = random_jitter_range - super(ExponentialRetry, self).__init__( - retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs - ) - - def get_backoff_time(self, settings): - """ - Calculates how long to sleep before retrying. - :param dict settings: - :keyword callable cls: A custom type or function that will be passed the direct response - :return: - An integer indicating how long to wait before retrying the request, - or None to indicate no retry should be performed. - :rtype: int or None - """ - random_generator = random.Random() - backoff = self.initial_backoff + ( - 0 if settings["count"] == 0 else pow(self.increment_base, settings["count"]) - ) - random_range_start = ( - backoff - self.random_jitter_range - if backoff > self.random_jitter_range - else 0 - ) - random_range_end = backoff + self.random_jitter_range - return random_generator.uniform(random_range_start, random_range_end) - - -class LinearRetry(TablesRetryPolicy): - """Linear retry.""" - - def __init__( - self, - backoff=15, - retry_total=3, - retry_to_secondary=False, - random_jitter_range=3, - **kwargs - ): - """ - Constructs a Linear retry object. - - :param int backoff: - The backoff interval, in seconds, between retries. - :param int max_attempts: - The maximum number of retry attempts. - :param bool retry_to_secondary: - Whether the request should be retried to secondary, if able. This should - only be enabled of RA-GRS accounts are used and potentially stale data - can be handled. - :param int retry_total: total number of retries - :param int random_jitter_range: - A number in seconds which indicates a range to jitter/randomize for the back-off interval. - For example, a random_jitter_range of 3 results in the back-off interval x to vary between x+3 and x-3. - """ - self.backoff = backoff - self.random_jitter_range = random_jitter_range - super(LinearRetry, self).__init__( - retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs - ) - - def get_backoff_time(self, settings): - """ - Calculates how long to sleep before retrying. - - :param dict settings: - :keyword callable cls: A custom type or function that will be passed the direct response - :return: - An integer indicating how long to wait before retrying the request, - or None to indicate no retry should be performed. 
- :rtype: int or None - """ - random_generator = random.Random() - # the backoff interval normally does not change, however there is the possibility - # that it was modified by accessing the property directly after initializing the object - random_range_start = ( - self.backoff - self.random_jitter_range - if self.backoff > self.random_jitter_range - else 0 - ) - random_range_end = self.backoff + self.random_jitter_range - return random_generator.uniform(random_range_start, random_range_end) + self.update_context(response.context, retry_settings) + return response class CosmosPatchTransformPolicy(SansIOHTTPPolicy): diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_serialize.py b/sdk/tables/azure-data-tables/azure/data/tables/_serialize.py index 7ae5ba08ef93..58a51eb291ca 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_serialize.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_serialize.py @@ -4,13 +4,14 @@ # license information. # -------------------------------------------------------------------------- -from typing import Dict, Any +from typing import Dict from uuid import UUID from datetime import datetime from math import isnan from enum import Enum import sys +import six from azure.core import MatchConditions from azure.core.exceptions import raise_with_traceback @@ -19,9 +20,6 @@ from ._error import _ERROR_VALUE_TOO_LARGE, _ERROR_TYPE_NOT_SUPPORTED -_SUPPORTED_API_VERSIONS = ["2019-02-02", "2019-07-07"] - - def _get_match_headers(kwargs, match_param, etag_param): if_match = None if_none_match = None @@ -54,17 +52,29 @@ def _get_match_headers(kwargs, match_param, etag_param): return if_match, if_none_match -def get_api_version(kwargs, default): - # type: (Dict[str, Any], str) -> str - api_version = kwargs.pop("api_version", None) - if api_version and api_version not in _SUPPORTED_API_VERSIONS: - versions = "\n".join(_SUPPORTED_API_VERSIONS) - raise ValueError( - "Unsupported API version '{}'. 
Please select from:\n{}".format(
-                api_version, versions
-            )
-        )
-    return api_version or default
+def _parameter_filter_substitution(parameters, query_filter):
+    # type: (Dict[str, str], str) -> str
+    """Replace user-defined parameters in the filter.
+    :param parameters: User defined parameters
+    :param query_filter: Filter for querying
+    """
+    if parameters:
+        filter_strings = query_filter.split(' ')
+        for index, word in enumerate(filter_strings):
+            if word[0] == u'@':
+                val = parameters[word[1:]]
+                if val in [True, False]:
+                    filter_strings[index] = str(val).lower()
+                elif isinstance(val, (float, six.integer_types)):
+                    filter_strings[index] = str(val)
+                elif isinstance(val, datetime):
+                    filter_strings[index] = "datetime'{}'".format(_to_utc_datetime(val))
+                elif isinstance(val, UUID):
+                    filter_strings[index] = "guid'{}'".format(str(val))
+                else:
+                    filter_strings[index] = "'{}'".format(val.replace("'", "''"))
        return ' '.join(filter_strings)
+    return query_filter


 def _to_entity_binary(value):
diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_table_client.py b/sdk/tables/azure-data-tables/azure/data/tables/_table_client.py
index 8bcaee5e01f1..d1b965ca106c 100644
--- a/sdk/tables/azure-data-tables/azure/data/tables/_table_client.py
+++ b/sdk/tables/azure-data-tables/azure/data/tables/_table_client.py
@@ -18,19 +18,16 @@
 from azure.core.paging import ItemPaged
 from azure.core.tracing.decorator import distributed_trace

-from ._constants import CONNECTION_TIMEOUT
 from ._deserialize import _convert_to_entity, _trim_service_metadata
 from ._entity import TableEntity
-from ._error import _process_table_error
-from ._generated import AzureTable
+from ._error import _process_table_error, _validate_table_name
 from ._generated.models import (
     SignedIdentifier,
     TableProperties,
 )
 from ._serialize import _get_match_headers, _add_entity_properties
-from ._base_client import parse_connection_str
-from ._table_client_base import TableClientBase
-from ._serialize import serialize_iso
+from ._base_client import parse_connection_str, TablesBaseClient
+from ._serialize import serialize_iso, _parameter_filter_substitution
 from ._deserialize import _return_headers_and_deserialized
 from ._table_batch import TableBatchOperations
 from ._models import TableEntityPropertiesPaged, UpdateMode, AccessPolicy
@@ -38,7 +35,8 @@
 if TYPE_CHECKING:
     from typing import Optional, Any, Union  # pylint: disable=ungrouped-imports

-class TableClient(TableClientBase):
+
+class TableClient(TablesBaseClient):
     """
     :ivar str account_name: Name of the storage account (Cosmos or Azure)"""
     def __init__(
@@ -65,15 +63,17 @@ def __init__(

         :returns: None
         """
-        super(TableClient, self).__init__(
-            account_url, table_name, credential=credential, **kwargs
-        )
-        kwargs['connection_timeout'] = kwargs.get('connection_timeout') or CONNECTION_TIMEOUT
-        self._client = AzureTable(
-            self.url,
-            policies=kwargs.pop('policies', self._policies),
-            **kwargs
-        )
+        if not table_name:
+            raise ValueError("Please specify a table name.")
+        _validate_table_name(table_name)
+        self.table_name = table_name
+        super(TableClient, self).__init__(account_url, credential=credential, **kwargs)
+
+    def _format_url(self, hostname):
+        """Format the endpoint URL according to the current location
+        mode hostname.
+ """ + return "{}://{}{}".format(self.scheme, hostname, self._query_str) @classmethod def from_connection_string( @@ -103,7 +103,7 @@ def from_connection_string( :caption: Authenticating a TableServiceClient from a connection_string """ account_url, credential = parse_connection_str( - conn_str=conn_str, credential=None, service="table", keyword_args=kwargs + conn_str=conn_str, credential=None, keyword_args=kwargs ) return cls(account_url, table_name=table_name, credential=credential, **kwargs) @@ -188,7 +188,6 @@ def set_table_access_policy( :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - self._validate_signed_identifiers(signed_identifiers) identifiers = [] for key, value in signed_identifiers.items(): if value: @@ -201,7 +200,16 @@ def set_table_access_policy( table=self.table_name, table_acl=signed_identifiers or None, **kwargs ) except HttpResponseError as error: - _process_table_error(error) + try: + _process_table_error(error) + except HttpResponseError as table_error: + if (table_error.error_code == 'InvalidXmlDocument' + and len(signed_identifiers) > 5): + raise ValueError( + 'Too many access policies provided. The server does not support setting ' + 'more than 5 access policies on a single resource.' + ) + raise @distributed_trace def create_table( @@ -462,7 +470,7 @@ def list_entities( @distributed_trace def query_entities( self, - filter, # type: str pylint: disable=redefined-builtin + query_filter, **kwargs ): # type: (...) -> ItemPaged[TableEntity] @@ -487,8 +495,8 @@ def query_entities( :caption: Query entities held within a table """ parameters = kwargs.pop("parameters", None) - filter = self._parameter_filter_substitution( - parameters, filter + query_filter = _parameter_filter_substitution( + parameters, query_filter ) top = kwargs.pop("results_per_page", None) user_select = kwargs.pop("select", None) @@ -500,7 +508,7 @@ def query_entities( command, table=self.table_name, results_per_page=top, - filter=filter, + filter=query_filter, select=user_select, page_iterator_class=TableEntityPropertiesPaged, ) diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_table_client_base.py b/sdk/tables/azure-data-tables/azure/data/tables/_table_client_base.py deleted file mode 100644 index fae58e0032a0..000000000000 --- a/sdk/tables/azure-data-tables/azure/data/tables/_table_client_base.py +++ /dev/null @@ -1,86 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -try: - from urllib.parse import urlparse -except ImportError: - from urlparse import urlparse # type: ignore - -from ._error import _validate_table_name -from ._base_client import parse_query -from ._base_client import StorageAccountHostsMixin - - -class TableClientBase(StorageAccountHostsMixin): - """Create TableClientBase from a Credential. - - :param account_url: - A url to an Azure Storage account. - :type account_url: str - :param table_name: The table name. - :type table_name: str - :param credential: - The credentials with which to authenticate. This is optional if the - account URL already has a SAS token, or the connection string already has shared - access key values. The value can be a SAS token string, an account shared access - key. 
- :type credential: str - - :returns: None - """ - - def __init__( - self, - account_url, # type: str - table_name, # type: str - credential=None, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - - _validate_table_name(table_name) - - try: - if not account_url.lower().startswith("http"): - account_url = "https://" + account_url - except AttributeError: - raise ValueError("Account URL must be a string.") - parsed_url = urlparse(account_url.rstrip("/")) - if not table_name: - raise ValueError("Please specify a table name.") - if not parsed_url.netloc: - raise ValueError("Invalid URL: {}".format(parsed_url)) - - _, sas_token = parse_query(parsed_url.query) - if not sas_token and not credential: - raise ValueError( - "You need to provide either a SAS token or an account shared key to authenticate." - ) - - self.table_name = table_name - self._query_str, credential = self._format_query_string(sas_token, credential) - super(TableClientBase, self).__init__( - parsed_url, service="table", credential=credential, **kwargs - ) - - def _format_url(self, hostname): - """Format the endpoint URL according to the current location - mode hostname. - """ - return "{}://{}{}".format(self.scheme, hostname, self._query_str) - - @classmethod - def _validate_signed_identifiers(cls, signed_identifiers): - # type: (...) -> None - """Validate the number of signed identifiers is less than five - - :param signed_identifiers: - :type signed_identifiers: dict[str,AccessPolicy] - """ - if len(signed_identifiers) > 5: - raise ValueError( - 'Too many access policies provided. The server does not support setting ' - 'more than 5 access policies on a single resource.') diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_table_service_client.py b/sdk/tables/azure-data-tables/azure/data/tables/_table_service_client.py index cf60ecbedb9e..5edfdb74f3fd 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_table_service_client.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_table_service_client.py @@ -11,8 +11,6 @@ from azure.core.tracing.decorator import distributed_trace from azure.core.pipeline import Pipeline -from ._constants import CONNECTION_TIMEOUT -from ._generated import AzureTable from ._generated.models import TableProperties, TableServiceProperties from ._models import ( TablePropertiesPaged, @@ -20,24 +18,15 @@ service_properties_deserialize, TableItem ) -from ._base_client import parse_connection_str +from ._base_client import parse_connection_str, TablesBaseClient, TransportWrapper from ._models import LocationMode from ._error import _process_table_error from ._table_client import TableClient -from ._table_service_client_base import TableServiceClientBase +from ._serialize import _parameter_filter_substitution -class TableServiceClient(TableServiceClientBase): - """ :ivar str account_name: Name of the storage account (Cosmos or Azure)""" - - def __init__( - self, - account_url, # type: str - credential=None, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Create TableServiceClient from a Credential. +class TableServiceClient(TablesBaseClient): + """Create TableServiceClient from a Credential. :param account_url: A url to an Azure Storage account. 
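For orientation, a hedged construction sketch to accompany the docstring above (the connection string is a placeholder, not part of this changeset):

```python
from azure.data.tables import TableServiceClient

# Placeholder connection string; a real one carries the account name and key.
conn_str = "DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=<key>;EndpointSuffix=core.windows.net"

service = TableServiceClient.from_connection_string(conn_str)
print(service.account_name)
```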
@@ -66,15 +55,12 @@ def __init__( :dedent: 8 :caption: Authenticating a TableServiceClient from a Shared Account Key """ - super(TableServiceClient, self).__init__( - account_url, service="table", credential=credential, **kwargs - ) - kwargs['connection_timeout'] = kwargs.get('connection_timeout') or CONNECTION_TIMEOUT - self._client = AzureTable( - self.url, - policies=kwargs.pop('policies', self._policies), - **kwargs - ) + + def _format_url(self, hostname): + """Format the endpoint URL according to the current location + mode hostname. + """ + return "{}://{}{}".format(self.scheme, hostname, self._query_str) @classmethod def from_connection_string( @@ -100,7 +86,7 @@ def from_connection_string( :caption: Authenticating a TableServiceClient from a connection_string """ account_url, credential = parse_connection_str( - conn_str=conn_str, credential=None, service="table", keyword_args=kwargs + conn_str=conn_str, credential=None, keyword_args=kwargs ) return cls(account_url, credential=credential, **kwargs) @@ -268,7 +254,7 @@ def delete_table( @distributed_trace def query_tables( self, - filter, # pylint: disable=redefined-builtin + query_filter, **kwargs # type: Any ): # type: (...) -> ItemPaged[TableItem] @@ -294,9 +280,9 @@ def query_tables( :caption: Querying tables in a storage account """ parameters = kwargs.pop("parameters", None) - filter = self._parameter_filter_substitution( - parameters, filter - ) # pylint: disable=redefined-builtin + query_filter = _parameter_filter_substitution( + parameters, query_filter + ) top = kwargs.pop("results_per_page", None) user_select = kwargs.pop("select", None) if user_select and not isinstance(user_select, str): @@ -306,7 +292,7 @@ def query_tables( return ItemPaged( command, results_per_page=top, - filter=filter, + filter=query_filter, select=user_select, page_iterator_class=TablePropertiesPaged, ) @@ -360,24 +346,17 @@ def get_table_client(self, table_name, **kwargs): :rtype: ~azure.data.tables.TableClient """ - - _pipeline = Pipeline( - transport=self._client._client._pipeline._transport, # pylint: disable=protected-access - policies=self._policies, # pylint: disable=protected-access + pipeline = Pipeline( + transport=TransportWrapper(self._client._client._pipeline._transport), # pylint: disable = protected-access + policies=self._policies ) - return TableClient( self.url, table_name=table_name, credential=self.credential, - key_resolver_function=self.key_resolver_function, - require_encryption=self.require_encryption, - key_encryption_key=self.key_encryption_key, api_version=self.api_version, - transport=self._client._client._pipeline._transport, # pylint: disable=protected-access - policies=self._policies, - _configuration=self._client._config, # pylint: disable=protected-access - _location_mode=self._location_mode, + pipeline=pipeline, + location_mode=self._location_mode, _hosts=self._hosts, **kwargs ) diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_table_service_client_base.py b/sdk/tables/azure-data-tables/azure/data/tables/_table_service_client_base.py deleted file mode 100644 index d0c6476ddcd2..000000000000 --- a/sdk/tables/azure-data-tables/azure/data/tables/_table_service_client_base.py +++ /dev/null @@ -1,63 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
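The `query_tables` change above moves callers from the `filter=` keyword to a positional `query_filter` argument; a minimal before/after sketch (the table name and the `table_service` client are illustrative, reusing the construction shown earlier):

```python
# Before: table_service.query_tables(filter="TableName eq 'mytable'")
# After: positional query_filter, with optional parameter substitution.
name_filter = "TableName eq @name"
for table in table_service.query_tables(name_filter, parameters={"name": "mytable"}):
    print(table.table_name)
```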
-# -------------------------------------------------------------------------- - -try: - from urllib.parse import urlparse -except ImportError: - from urlparse import urlparse # type: ignore - -from ._base_client import parse_query, StorageAccountHostsMixin - - -class TableServiceClientBase(StorageAccountHostsMixin): - """:ivar str account_name: Name of the storage account (Cosmos or Azure) - Create TableServiceClientBase class for sync and async code. - - :param account_url: - A account_url url to an Azure Storage account. - :type service: str - :param credential: - The credentials with which to authenticate. This is optional if the - account URL already has a SAS token, or the connection string already has shared - access key values. The value can be a SAS token string or, an account shared access - key. - :type credential: str - :returns: None - """ - - def __init__( - self, - account_url, # type: Any - service, # type: str - credential=None, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - - try: - if not account_url.lower().startswith("http"): - account_url = "https://" + account_url - except AttributeError: - raise ValueError("Account URL must be a string.") - parsed_url = urlparse(account_url.rstrip("/")) - if not parsed_url.netloc: - raise ValueError("Invalid URL: {}".format(account_url)) - - _, sas_token = parse_query(parsed_url.query) - if not sas_token and not credential: - raise ValueError( - "You need to provide either a SAS token or an account shared key to authenticate." - ) - self._query_str, credential = self._format_query_string(sas_token, credential) - super(TableServiceClientBase, self).__init__( - parsed_url, service=service, credential=credential, **kwargs - ) - - def _format_url(self, hostname): - """Format the endpoint URL according to the current location - mode hostname. - """ - return "{}://{}{}".format(self.scheme, hostname, self._query_str) diff --git a/sdk/tables/azure-data-tables/azure/data/tables/aio/_base_client_async.py b/sdk/tables/azure-data-tables/azure/data/tables/aio/_base_client_async.py index ea5f21623d77..5fe3cb7f586e 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/aio/_base_client_async.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/aio/_base_client_async.py @@ -4,18 +4,7 @@ # license information. 
# -------------------------------------------------------------------------- -from typing import ( # pylint: disable=unused-import - Union, - Optional, - Any, - Iterable, - Dict, - List, - Type, - Tuple, - TYPE_CHECKING, -) -import logging +from typing import Any, List from uuid import uuid4 from azure.core.credentials import AzureSasCredential @@ -28,44 +17,43 @@ HttpLoggingPolicy, UserAgentPolicy, ProxyPolicy, - AzureSasCredentialPolicy + AzureSasCredentialPolicy, + RequestIdPolicy, + CustomHookPolicy, + NetworkTraceLoggingPolicy ) from azure.core.pipeline.transport import ( AsyncHttpTransport, HttpRequest, ) +from .._generated.aio import AzureTable +from .._base_client import AccountHostsMixin, get_api_version from .._authentication import SharedKeyCredentialPolicy -from .._constants import STORAGE_OAUTH_SCOPE, CONNECTION_TIMEOUT, READ_TIMEOUT -from .._generated.aio._configuration import AzureTableConfiguration +from .._constants import STORAGE_OAUTH_SCOPE from .._models import BatchErrorException, BatchTransactionResult -from .._policies import ( - CosmosPatchTransformPolicy, - StorageContentValidation, - StorageRequestHook, - StorageHosts, - StorageHeadersPolicy, - StorageLoggingPolicy, -) +from .._policies import StorageHosts, StorageHeadersPolicy from .._sdk_moniker import SDK_MONIKER -from ._policies_async import ( - AsyncStorageResponseHook, - AsyncTablesRetryPolicy -) - -if TYPE_CHECKING: - from azure.core.pipeline import Pipeline - from azure.core.configuration import Configuration +from ._policies_async import AsyncTablesRetryPolicy -_LOGGER = logging.getLogger(__name__) +class AsyncTablesBaseClient(AccountHostsMixin): -class AsyncStorageAccountHostsMixin(object): - def __enter__(self): - raise TypeError("Async client only supports 'async with'.") + def __init__( + self, + account_url, # type: str + credential=None, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + super(AsyncTablesBaseClient, self).__init__(account_url, credential=credential, **kwargs) + self._client = AzureTable( + self.url, + policies=kwargs.pop('policies', self._policies), + **kwargs + ) + self._client._config.version = get_api_version(kwargs, self._client._config.version) # pylint: disable=protected-access - def __exit__(self, *args): - pass async def __aenter__(self): await self._client.__aenter__() @@ -74,7 +62,7 @@ async def __aenter__(self): async def __aexit__(self, *args): await self._client.__aexit__(*args) - async def close(self): + async def close(self) -> None: """This method is to close the sockets opened by the client. It need not be used when using with a context manager. """ @@ -82,7 +70,6 @@ async def close(self): def _configure_credential(self, credential): # type: (Any) -> None - self._credential_policy = None if hasattr(credential, "get_token"): self._credential_policy = AsyncBearerTokenCredentialPolicy( credential, STORAGE_OAUTH_SCOPE @@ -95,38 +82,22 @@ def _configure_credential(self, credential): raise TypeError("Unsupported credential: {}".format(credential)) def _configure_policies(self, **kwargs): - # type: (**Any) -> None - try: - from azure.core.pipeline.transport import AioHttpTransport - if not kwargs.get("transport"): - kwargs.setdefault("transport", AioHttpTransport(**kwargs)) - except ImportError: - raise ImportError( - "Unable to create async transport. Please check aiohttp is installed." 
- ) - - kwargs.setdefault("connection_timeout", CONNECTION_TIMEOUT) - kwargs.setdefault("read_timeout", READ_TIMEOUT) - self._policies = [ + return [ + RequestIdPolicy(**kwargs), StorageHeadersPolicy(**kwargs), - ProxyPolicy(**kwargs), UserAgentPolicy(sdk_moniker=SDK_MONIKER, **kwargs), - StorageContentValidation(), - StorageRequestHook(**kwargs), + ProxyPolicy(**kwargs), self._credential_policy, ContentDecodePolicy(response_encoding="utf-8"), AsyncRedirectPolicy(**kwargs), - StorageHosts(hosts=self._hosts, **kwargs), + StorageHosts(**kwargs), AsyncTablesRetryPolicy(**kwargs), - StorageLoggingPolicy(**kwargs), - AsyncStorageResponseHook(**kwargs), + CustomHookPolicy(**kwargs), + NetworkTraceLoggingPolicy(**kwargs), DistributedTracingPolicy(**kwargs), HttpLoggingPolicy(**kwargs), ] - if self._cosmos_endpoint: - self._policies.insert(0, CosmosPatchTransformPolicy()) - async def _batch_send( self, entities, # type: List[TableEntity] @@ -202,7 +173,6 @@ class AsyncTransportWrapper(AsyncHttpTransport): by a `get_client` method does not close the outer transport for the parent when used in a context manager. """ - def __init__(self, async_transport): self._transport = async_transport diff --git a/sdk/tables/azure-data-tables/azure/data/tables/aio/_policies_async.py b/sdk/tables/azure-data-tables/azure/data/tables/aio/_policies_async.py index 89ff936c4539..96139f7c5b4e 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/aio/_policies_async.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/aio/_policies_async.py @@ -3,317 +3,153 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- +import time -import asyncio -import random -import logging -from typing import Any, TYPE_CHECKING +from azure.core.pipeline.policies import AsyncRetryPolicy +from azure.core.exceptions import ( + AzureError, + ClientAuthenticationError, + ServiceRequestError +) -from azure.core.pipeline.policies import AsyncHTTPPolicy, AsyncRetryPolicy -from azure.core.exceptions import AzureError +from .._models import LocationMode +from .._policies import set_next_host_location -from .._policies import is_retry, increment, TablesRetryPolicy -if TYPE_CHECKING: - from azure.core.pipeline import PipelineRequest, PipelineResponse +class AsyncTablesRetryPolicy(AsyncRetryPolicy): + """A retry policy. + The retry policy in the pipeline can be configured directly, or tweaked on a per-call basis. -_LOGGER = logging.getLogger(__name__) + :keyword bool retry_to_secondary: Whether to allow retrying to the secondary fail-over host + location. Default value is False. + :keyword int retry_total: Total number of retries to allow. Takes precedence over other counts. + Default value is 10. -async def retry_hook(settings, **kwargs): - if settings["hook"]: - if asyncio.iscoroutine(settings["hook"]): - await settings["hook"]( - retry_count=settings["count"] - 1, - location_mode=settings["mode"], - **kwargs - ) - else: - settings["hook"]( - retry_count=settings["count"] - 1, - location_mode=settings["mode"], - **kwargs - ) + :keyword int retry_connect: How many connection-related errors to retry on. + These are errors raised before the request is sent to the remote server, + which we assume has not triggered the server to process the request. Default value is 3. + :keyword int retry_read: How many times to retry on read errors. 
+ These errors are raised after the request was sent to the server, so the
+ request may have side-effects. Default value is 3.
-class AsyncStorageResponseHook(AsyncHTTPPolicy):
- def __init__(self, **kwargs):
- self._response_callback = kwargs.get("raw_response_hook")
- super(AsyncStorageResponseHook, self).__init__()
+ :keyword int retry_status: How many times to retry on bad status codes. Default value is 3.
- async def send(self, request):
- # type: (PipelineRequest) -> PipelineResponse
- data_stream_total = request.context.get(
- "data_stream_total"
- ) or request.context.options.pop("data_stream_total", None)
- download_stream_current = request.context.get(
- "download_stream_current"
- ) or request.context.options.pop("download_stream_current", None)
- upload_stream_current = request.context.get(
- "upload_stream_current"
- ) or request.context.options.pop("upload_stream_current", None)
- response_callback = request.context.get(
- "response_callback"
- ) or request.context.options.pop("raw_response_hook", self._response_callback)
+ :keyword float retry_backoff_factor: A backoff factor to apply between attempts after the second try
+ (most errors are resolved immediately by a second try without a delay).
+ In fixed mode, retry policy will always sleep for {backoff factor}.
+ In 'exponential' mode, retry policy will sleep for: `{backoff factor} * (2 ** ({number of total retries} - 1))`
+ seconds. If the backoff_factor is 0.1, then the retry will sleep
+ for [0.0s, 0.2s, 0.4s, ...] between retries. The default value is 0.8.
- response = await self.next.send(request)
- await response.http_response.load_body()
+ :keyword int retry_backoff_max: The maximum backoff time. Default value is 120 seconds (2 minutes).
- will_retry = is_retry(response, request.context.options.get("mode"))
- if not will_retry and download_stream_current is not None:
- download_stream_current += int(
- response.http_response.headers.get("Content-Length", 0)
- )
- if data_stream_total is None:
- content_range = response.http_response.headers.get("Content-Range")
- if content_range:
- data_stream_total = int(
- content_range.split(" ", 1)[1].split("/", 1)[1]
- )
- else:
- data_stream_total = download_stream_current
- elif not will_retry and upload_stream_current is not None:
- upload_stream_current += int(
- response.http_request.headers.get("Content-Length", 0)
- )
- for pipeline_obj in [request, response]:
- pipeline_obj.context["data_stream_total"] = data_stream_total
- pipeline_obj.context["download_stream_current"] = download_stream_current
- pipeline_obj.context["upload_stream_current"] = upload_stream_current
- if response_callback:
- if asyncio.iscoroutine(response_callback):
- await response_callback(response)
- else:
- response_callback(response)
- request.context["response_callback"] = response_callback
- return response
+ :keyword RetryMode retry_mode: Fixed or exponential delay between attempts, default is exponential.
+ :keyword int timeout: Timeout setting for the operation in seconds, default is 604800s (7 days).
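A sketch of supplying these keywords at client construction, assuming they reach the retry policy through `**kwargs` as in `_configure_policies` above (values are illustrative, not recommendations):

```python
from azure.data.tables import TableClient

client = TableClient.from_connection_string(
    conn_str,  # placeholder, as in the earlier sketch
    table_name="mytable",
    retry_total=5,             # overall retry budget
    retry_backoff_factor=0.8,  # the documented default
    retry_backoff_max=60,      # cap each sleep at 60 seconds
    retry_to_secondary=True,   # allow fail-over to the secondary host
)
```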
+ """ -class AsyncTablesRetryPolicy(AsyncRetryPolicy, TablesRetryPolicy): - """Exponential retry.""" - - def __init__( - self, - initial_backoff=15, - increment_base=3, - retry_total=3, - retry_to_secondary=False, - random_jitter_range=3, - **kwargs - ): + def __init__(self, **kwargs): + super(AsyncTablesRetryPolicy, self).__init__(**kwargs) + self.retry_to_secondary = kwargs.get('retry_to_secondary', False) + + def is_retry(self, settings, response): + """Is this method/status code retryable? (Based on whitelists and control + variables such as the number of total retries to allow, whether to + respect the Retry-After header, whether this header is present, and + whether the returned status code is on the list of status codes to + be retried upon on the presence of the aforementioned header) """ - Constructs an Exponential retry object. The initial_backoff is used for - the first retry. Subsequent retries are retried after initial_backoff + - increment_power^retry_count seconds. For example, by default the first retry - occurs after 15 seconds, the second after (15+3^1) = 18 seconds, and the - third after (15+3^2) = 24 seconds. - - :param int initial_backoff: - The initial backoff interval, in seconds, for the first retry. - :param int increment_base: - The base, in seconds, to increment the initial_backoff by after the - first retry. - :param int max_attempts: - The maximum number of retry attempts. - :param bool retry_to_secondary: - Whether the request should be retried to secondary, if able. This should - only be enabled of RA-GRS accounts are used and potentially stale data - can be handled. - :param int random_jitter_range: - A number in seconds which indicates a range to jitter/randomize for the back-off interval. - For example, a random_jitter_range of 3 results in the back-off interval x to vary between x+3 and x-3. + should_retry = super(AsyncTablesRetryPolicy, self).is_retry(settings, response) + status = response.http_response.status_code + if status == 404 and settings['mode'] == LocationMode.SECONDARY: + # Response code 404 should be retried if secondary was used. + return True + return should_retry + + def configure_retries(self, options): + """Configures the retry settings. + + :param options: keyword arguments from context. + :return: A dict containing settings and history for retries. + :rtype: dict """ - self.initial_backoff = initial_backoff - self.increment_base = increment_base - self.random_jitter_range = random_jitter_range - super(AsyncTablesRetryPolicy, self).__init__( - retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs - ) - - def get_backoff_time(self, settings): + config = super(AsyncTablesRetryPolicy, self).configure_retries(options) + config["retry_secondary"] = options.pop("retry_to_secondary", self.retry_to_secondary) + config["mode"] = options.pop("location_mode", LocationMode.PRIMARY) + config["hosts"] = options.pop("hosts", None) + return config + + def update_context(self, context, retry_settings): + """Updates retry history in pipeline context. + + :param context: The pipeline context. + :type context: ~azure.core.pipeline.PipelineContext + :param retry_settings: The retry settings. + :type retry_settings: dict """ - Calculates how long to sleep before retrying. + super(AsyncTablesRetryPolicy, self).update_context(context, retry_settings) + context['location_mode'] = retry_settings['mode'] - :return: - An integer indicating how long to wait before retrying the request, - or None to indicate no retry should be performed. 
- :rtype: int or None - """ - random_generator = random.Random() - backoff = self.initial_backoff + ( - 0 if settings["count"] == 0 else pow(self.increment_base, settings["count"]) - ) - random_range_start = ( - backoff - self.random_jitter_range - if backoff > self.random_jitter_range - else 0 - ) - random_range_end = backoff + self.random_jitter_range - return random_generator.uniform(random_range_start, random_range_end) + def update_request(self, request, retry_settings): # pylint: disable=no-self-use + """Updates the pipeline request before attempting to retry. - async def sleep( # pylint: disable=arguments-differ - self, settings, transport - ): - backoff = self.get_backoff_time(settings) - if not backoff or backoff < 0: - return - await transport.sleep(backoff) + :param PipelineRequest request: The outgoing request. + :param dict(str, Any) retry_settings: The current retry context settings. + """ + set_next_host_location(retry_settings, request) async def send(self, request): - retries_remaining = True + """Uses the configured retry policy to send the request to the next policy in the pipeline. + + :param request: The PipelineRequest object + :type request: ~azure.core.pipeline.PipelineRequest + :return: Returns the PipelineResponse or raises error if maximum retries exceeded. + :rtype: ~azure.core.pipeline.PipelineResponse + :raise: ~azure.core.exceptions.AzureError if maximum retries exceeded. + :raise: ~azure.core.exceptions.ClientAuthenticationError if authentication fails + """ + retry_active = True response = None - retry_settings = self.configure_retries(request) - while retries_remaining: + retry_settings = self.configure_retries(request.context.options) + absolute_timeout = retry_settings['timeout'] + is_response_error = True + + while retry_active: try: + start_time = time.time() + self._configure_timeout(request, absolute_timeout, is_response_error) response = await self.next.send(request) - if is_retry(response, retry_settings["mode"]): - retries_remaining = increment( - retry_settings, - request=request.http_request, - response=response.http_response - ) - if retries_remaining: - await retry_hook( - retry_settings, - request=request.http_request, - response=response.http_response, - error=None, - ) - await self.sleep(retry_settings, request.context.transport) + if self.is_retry(retry_settings, response): + retry_active = self.increment(retry_settings, response=response) + if retry_active: + self.update_request(request, retry_settings) + await self.sleep(retry_settings, request.context.transport, response=response) + is_response_error = True continue break + except ClientAuthenticationError: # pylint:disable=try-except-raise + # the authentication policy failed such that the client's request can't + # succeed--we'll never have a response to it, so propagate the exception + raise except AzureError as err: - retries_remaining = increment( - retry_settings, request=request.http_request, error=err) - if retries_remaining: - await retry_hook( - retry_settings, - request=request.http_request, - response=None, - error=err, - ) - await self.sleep(retry_settings, request.context.transport) - continue + if self._is_method_retryable(retry_settings, request.http_request): + retry_active = self.increment(retry_settings, response=request, error=err) + if retry_active: + self.update_request(request, retry_settings) + await self.sleep(retry_settings, request.context.transport) + if isinstance(err, ServiceRequestError): + is_response_error = False + else: + is_response_error = True + 
continue raise err - if retry_settings["history"]: - response.context["history"] = retry_settings["history"] - response.http_response.location_mode = retry_settings["mode"] - return response - + finally: + end_time = time.time() + if absolute_timeout: + absolute_timeout -= (end_time - start_time) -class ExponentialRetry(AsyncTablesRetryPolicy): - """Exponential retry.""" - - def __init__( - self, - initial_backoff=15, - increment_base=3, - retry_total=3, - retry_to_secondary=False, - random_jitter_range=3, - **kwargs - ): - """ - Constructs an Exponential retry object. The initial_backoff is used for - the first retry. Subsequent retries are retried after initial_backoff + - increment_power^retry_count seconds. For example, by default the first retry - occurs after 15 seconds, the second after (15+3^1) = 18 seconds, and the - third after (15+3^2) = 24 seconds. - - :param int initial_backoff: - The initial backoff interval, in seconds, for the first retry. - :param int increment_base: - The base, in seconds, to increment the initial_backoff by after the - first retry. - :param int max_attempts: - The maximum number of retry attempts. - :param bool retry_to_secondary: - Whether the request should be retried to secondary, if able. This should - only be enabled of RA-GRS accounts are used and potentially stale data - can be handled. - :param int random_jitter_range: - A number in seconds which indicates a range to jitter/randomize for the back-off interval. - For example, a random_jitter_range of 3 results in the back-off interval x to vary between x+3 and x-3. - """ - self.initial_backoff = initial_backoff - self.increment_base = increment_base - self.random_jitter_range = random_jitter_range - super(ExponentialRetry, self).__init__( - retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs - ) - - def get_backoff_time(self, settings): - """ - Calculates how long to sleep before retrying. - - :return: - An integer indicating how long to wait before retrying the request, - or None to indicate no retry should be performed. - :rtype: int or None - """ - random_generator = random.Random() - backoff = self.initial_backoff + ( - 0 if settings["count"] == 0 else pow(self.increment_base, settings["count"]) - ) - random_range_start = ( - backoff - self.random_jitter_range - if backoff > self.random_jitter_range - else 0 - ) - random_range_end = backoff + self.random_jitter_range - return random_generator.uniform(random_range_start, random_range_end) - - -class LinearRetry(AsyncTablesRetryPolicy): - """Linear retry.""" - - def __init__( - self, - backoff=15, - retry_total=3, - retry_to_secondary=False, - random_jitter_range=3, - **kwargs - ): - """ - Constructs a Linear retry object. - - :param int backoff: - The backoff interval, in seconds, between retries. - :param int max_attempts: - The maximum number of retry attempts. - :param bool retry_to_secondary: - Whether the request should be retried to secondary, if able. This should - only be enabled of RA-GRS accounts are used and potentially stale data - can be handled. - :param int random_jitter_range: - A number in seconds which indicates a range to jitter/randomize for the back-off interval. - For example, a random_jitter_range of 3 results in the back-off interval x to vary between x+3 and x-3. 
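Because `ExponentialRetry` and `LinearRetry` are deleted in this hunk, a hedged migration sketch; the parameter mapping is an assumption based on the keyword documentation above, not something the diff states:

```python
from azure.core.pipeline.policies import RetryMode
from azure.data.tables import TableClient

# Before (removed): LinearRetry(backoff=15, retry_total=3)
# After: roughly equivalent keyword configuration on the client.
client = TableClient.from_connection_string(
    conn_str,  # placeholder, as in the earlier sketch
    table_name="mytable",
    retry_mode=RetryMode.Fixed,  # fixed delay, like the removed LinearRetry
    retry_backoff_factor=15,     # fixed mode sleeps for the backoff factor each retry
    retry_total=3,
)
```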
- """ - self.backoff = backoff - self.random_jitter_range = random_jitter_range - super(LinearRetry, self).__init__( - retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs - ) - - def get_backoff_time(self, settings, **kwargs): # pylint: disable=unused-argument - """ - Calculates how long to sleep before retrying. - - :param **kwargs: - :return: - An integer indicating how long to wait before retrying the request, - or None to indicate no retry should be performed. - :rtype: int or None - """ - random_generator = random.Random() - # the backoff interval normally does not change, however there is the possibility - # that it was modified by accessing the property directly after initializing the object - random_range_start = ( - self.backoff - self.random_jitter_range - if self.backoff > self.random_jitter_range - else 0 - ) - random_range_end = self.backoff + self.random_jitter_range - return random_generator.uniform(random_range_start, random_range_end) + self.update_context(response.context, retry_settings) + return response diff --git a/sdk/tables/azure-data-tables/azure/data/tables/aio/_table_batch_async.py b/sdk/tables/azure-data-tables/azure/data/tables/aio/_table_batch_async.py index aac9335535ab..3c9f869713c8 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/aio/_table_batch_async.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/aio/_table_batch_async.py @@ -6,8 +6,6 @@ from typing import Dict, Any, Optional, Union, TYPE_CHECKING import msrest -from azure.core.pipeline import PipelineResponse - from .._common_conversion import _is_cosmos_endpoint, _transform_patch_to_cosmos_post from .._models import UpdateMode from .._serialize import ( @@ -21,6 +19,7 @@ if TYPE_CHECKING: from .._generated.models import QueryOptions + class TableBatchOperations(object): """ This is the class that is used for batch operations for the data tables diff --git a/sdk/tables/azure-data-tables/azure/data/tables/aio/_table_client_async.py b/sdk/tables/azure-data-tables/azure/data/tables/aio/_table_client_async.py index 5543e2125285..9f7db89934a6 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/aio/_table_client_async.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/aio/_table_client_async.py @@ -22,26 +22,21 @@ from azure.core.tracing.decorator_async import distributed_trace_async from .._base_client import parse_connection_str -from .._common_conversion import _is_cosmos_endpoint -from .._constants import CONNECTION_TIMEOUT from .._entity import TableEntity -from .._generated.aio import AzureTable from .._generated.models import SignedIdentifier, TableProperties from .._models import AccessPolicy, BatchTransactionResult -from .._serialize import serialize_iso +from .._serialize import serialize_iso, _parameter_filter_substitution from .._deserialize import _return_headers_and_deserialized -from .._error import _process_table_error +from .._error import _process_table_error, _validate_table_name from .._models import UpdateMode from .._deserialize import _convert_to_entity, _trim_service_metadata from .._serialize import _add_entity_properties, _get_match_headers -from .._table_client_base import TableClientBase -from ._base_client_async import AsyncStorageAccountHostsMixin +from ._base_client_async import AsyncTablesBaseClient from ._models import TableEntityPropertiesPaged -from ._policies_async import ExponentialRetry from ._table_batch_async import TableBatchOperations -class TableClient(AsyncStorageAccountHostsMixin, TableClientBase): +class 
TableClient(AsyncTablesBaseClient): """ :ivar str account_name: Name of the storage account (Cosmos or Azure)""" def __init__( @@ -68,29 +63,17 @@ def __init__( :returns: None """ - kwargs["retry_policy"] = kwargs.get("retry_policy") or ExponentialRetry( - **kwargs - ) - loop = kwargs.pop("loop", None) - - self._cosmos_endpoint = _is_cosmos_endpoint(account_url) - - super(TableClient, self).__init__( - account_url, - table_name=table_name, - credential=credential, - loop=loop, - **kwargs - ) - kwargs['connection_timeout'] = kwargs.get('connection_timeout') or CONNECTION_TIMEOUT - self._configure_policies(**kwargs) - self._client = AzureTable( - self.url, - policies=kwargs.pop('policies', self._policies), - loop=loop, - **kwargs - ) - self._loop = loop + if not table_name: + raise ValueError("Please specify a table name.") + _validate_table_name(table_name) + self.table_name = table_name + super(TableClient, self).__init__(account_url, credential=credential, **kwargs) + + def _format_url(self, hostname): + """Format the endpoint URL according to the current location + mode hostname. + """ + return "{}://{}{}".format(self.scheme, hostname, self._query_str) @classmethod def from_connection_string( @@ -120,7 +103,7 @@ def from_connection_string( :caption: Creating the TableClient from a connection string. """ account_url, credential = parse_connection_str( - conn_str=conn_str, credential=None, service="table", keyword_args=kwargs + conn_str=conn_str, credential=None, keyword_args=kwargs ) return cls(account_url, table_name=table_name, credential=credential, **kwargs) @@ -210,7 +193,6 @@ async def set_table_access_policy( :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - self._validate_signed_identifiers(signed_identifiers) identifiers = [] for key, value in signed_identifiers.items(): if value: @@ -223,7 +205,16 @@ async def set_table_access_policy( table=self.table_name, table_acl=signed_identifiers or None, **kwargs ) except HttpResponseError as error: - _process_table_error(error) + try: + _process_table_error(error) + except HttpResponseError as table_error: + if (table_error.error_code == 'InvalidXmlDocument' + and len(signed_identifiers) > 5): + raise ValueError( + 'Too many access policies provided. The server does not support setting ' + 'more than 5 access policies on a single resource.' + ) + raise @distributed_trace_async async def create_table( @@ -481,7 +472,7 @@ def list_entities( @distributed_trace def query_entities( self, - filter, # type: str # pylint: disable=redefined-builtin + query_filter, **kwargs ): # type: (...) 
-> AsyncItemPaged[TableEntity] @@ -506,9 +497,9 @@ def query_entities( :caption: Querying entities from a TableClient """ parameters = kwargs.pop("parameters", None) - filter = self._parameter_filter_substitution( - parameters, filter - ) # pylint: disable = redefined-builtin + query_filter = _parameter_filter_substitution( + parameters, query_filter + ) top = kwargs.pop("results_per_page", None) user_select = kwargs.pop("select", None) if user_select and not isinstance(user_select, str): @@ -519,7 +510,7 @@ def query_entities( command, table=self.table_name, results_per_page=top, - filter=filter, + filter=query_filter, select=user_select, page_iterator_class=TableEntityPropertiesPaged, ) diff --git a/sdk/tables/azure-data-tables/azure/data/tables/aio/_table_service_client_async.py b/sdk/tables/azure-data-tables/azure/data/tables/aio/_table_service_client_async.py index 3adebc7f14f9..e3be666c4879 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/aio/_table_service_client_async.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/aio/_table_service_client_async.py @@ -17,21 +17,18 @@ from azure.core.tracing.decorator_async import distributed_trace_async from .. import LocationMode -from .._constants import CONNECTION_TIMEOUT from .._base_client import parse_connection_str -from .._generated.aio._azure_table import AzureTable -from .._generated.models import TableServiceProperties, TableProperties +from .._generated.models import TableServiceProperties from .._models import service_stats_deserialize, service_properties_deserialize from .._error import _process_table_error -from .._table_service_client_base import TableServiceClientBase from .._models import TableItem -from ._policies_async import ExponentialRetry +from .._serialize import _parameter_filter_substitution from ._table_client_async import TableClient -from ._base_client_async import AsyncStorageAccountHostsMixin +from ._base_client_async import AsyncTablesBaseClient, AsyncTransportWrapper from ._models import TablePropertiesPaged -class TableServiceClient(AsyncStorageAccountHostsMixin, TableServiceClientBase): +class TableServiceClient(AsyncTablesBaseClient): """A client to interact with the Table Service at the account level. This client provides operations to retrieve and configure the account properties @@ -76,28 +73,11 @@ class TableServiceClient(AsyncStorageAccountHostsMixin, TableServiceClientBase): :caption: Creating the tableServiceClient with Shared Access Signature. """ - def __init__( - self, - account_url, # type: str - credential=None, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - kwargs["retry_policy"] = kwargs.get("retry_policy") or ExponentialRetry( - **kwargs - ) - loop = kwargs.pop("loop", None) - super(TableServiceClient, self).__init__( # type: ignore - account_url, service="table", credential=credential, loop=loop, **kwargs - ) - kwargs['connection_timeout'] = kwargs.get('connection_timeout') or CONNECTION_TIMEOUT - self._configure_policies(**kwargs) - self._client = AzureTable( - self.url, - policies=kwargs.pop('policies', self._policies), - **kwargs - ) - self._loop = loop + def _format_url(self, hostname): + """Format the endpoint URL according to the current location + mode hostname. 
+ """ + return "{}://{}{}".format(self.scheme, hostname, self._query_str) @classmethod def from_connection_string( @@ -124,7 +104,7 @@ def from_connection_string( """ account_url, credential = parse_connection_str( - conn_str=conn_str, credential=None, service="table", keyword_args=kwargs + conn_str=conn_str, credential=None, keyword_args=kwargs ) return cls(account_url, credential=credential, **kwargs) @@ -330,14 +310,14 @@ def list_tables( @distributed_trace def query_tables( self, - filter, # type: str pylint: disable=redefined-builtin + query_filter, # type: str **kwargs # type: Any ): # type: (...) -> AsyncItemPaged[TableItem] """Queries tables under the given account. - :param filter: Specify a filter to return certain tables. - :type filter: str + :param query_filter: Specify a filter to return certain tables. + :type query_filter: str :keyword int results_per_page: Number of tables per page in return ItemPaged :keyword select: Specify desired properties of a table to return certain tables :paramtype select: str or list[str] @@ -356,20 +336,19 @@ def query_tables( :caption: Querying tables in an account given specific parameters """ parameters = kwargs.pop("parameters", None) - filter = self._parameter_filter_substitution( - parameters, filter - ) # pylint: disable=redefined-builtin + query_filter = _parameter_filter_substitution( + parameters, query_filter + ) user_select = kwargs.pop("select", None) if user_select and not isinstance(user_select, str): user_select = ", ".join(user_select) top = kwargs.pop("results_per_page", None) - command = functools.partial(self._client.table.query, **kwargs) return AsyncItemPaged( command, results_per_page=top, select=user_select, - filter=filter, + filter=query_filter, page_iterator_class=TablePropertiesPaged, ) @@ -391,23 +370,17 @@ def get_table_client( :rtype: ~azure.data.tables.TableClient """ - _pipeline = AsyncPipeline( - transport=self._client._client._pipeline._transport, # pylint: disable=protected-access - policies=self._policies, # pylint: disable = protected-access + pipeline = AsyncPipeline( + transport=AsyncTransportWrapper(self._client._client._pipeline._transport), # pylint:disable=protected-access + policies=self._policies, ) - return TableClient( self.url, table_name=table_name, credential=self.credential, - key_resolver_function=self.key_resolver_function, - require_encryption=self.require_encryption, - key_encryption_key=self.key_encryption_key, api_version=self.api_version, - transport=self._client._client._pipeline._transport, # pylint: disable=protected-access - policies=self._policies, - _configuration=self._client._config, # pylint: disable=protected-access - _location_mode=self._location_mode, + pipeline=pipeline, + location_mode=self._location_mode, _hosts=self._hosts, **kwargs ) diff --git a/sdk/tables/azure-data-tables/samples/README.md b/sdk/tables/azure-data-tables/samples/README.md index e8fe15e7735f..17e49ae5323d 100644 --- a/sdk/tables/azure-data-tables/samples/README.md +++ b/sdk/tables/azure-data-tables/samples/README.md @@ -71,8 +71,8 @@ parameters = { "pk": PartitionKey, "rk": RowKey } -filter = "PartitionKey eq @pk and RowKey eq @rk" -table_client.query_entities(filter=filter, parameter=pk) +query_filter = "PartitionKey eq @pk and RowKey eq @rk" +table_client.query_entities(query_filter, parameter=pk) ``` #### Filter on Properties @@ -81,43 +81,43 @@ parameters = { "first": first_name, "last": last_name } -filter = "FirstName eq @first or LastName eq @last" -table_client.query_entities(filter=filter, 
parameter=pk) +query_filter = "FirstName eq @first or LastName eq @last" +table_client.query_entities(query_filter, parameter=pk) ``` #### Filter with string comparison operators ```python -filter = "LastName ge 'A' and LastName lt 'B'" -table_client.query_entities(filter=filter) +query_filter = "LastName ge 'A' and LastName lt 'B'" +table_client.query_entities(query_filter) ``` #### Filter with numeric properties ```python -filter = "Age gt 30" -table_client.query_entities(filter=filter) +query_filter = "Age gt 30" +table_client.query_entities(query_filter) ``` ```python -filter = "AmountDue le 100.25" -table_client.query_entities(filter=filter) +query_filter = "AmountDue le 100.25" +table_client.query_entities(query_filter) ``` #### Filter with boolean properties ```python -filter = "IsActive eq true" -table_client.query_entities(filter=filter) +query_filter = "IsActive eq true" +table_client.query_entities(query_filter) ``` #### Filter with DateTime properties ```python -filter = "CustomerSince eq datetime'2008-07-10T00:00:00Z'" -table_client.query_entities(filter=filter) +query_filter = "CustomerSince eq datetime'2008-07-10T00:00:00Z'" +table_client.query_entities(query_filter) ``` #### Filter with GUID properties ```python -filter = "GuidValue eq guid'a455c695-df98-5678-aaaa-81d3367e5a34'" -table_client.query_entities(filter=filter) +query_filter = "GuidValue eq guid'a455c695-df98-5678-aaaa-81d3367e5a34'" +table_client.query_entities(query_filter) ``` diff --git a/sdk/tables/azure-data-tables/samples/async_samples/sample_query_table_async.py b/sdk/tables/azure-data-tables/samples/async_samples/sample_query_table_async.py index db8d0f0a62a4..5bee5fb21b69 100644 --- a/sdk/tables/azure-data-tables/samples/async_samples/sample_query_table_async.py +++ b/sdk/tables/azure-data-tables/samples/async_samples/sample_query_table_async.py @@ -84,7 +84,7 @@ async def sample_query_entities(self): } name_filter = u"Name eq @name" async for entity_chosen in table_client.query_entities( - filter=name_filter, select=[u"Brand",u"Color"], parameters=parameters): + query_filter=name_filter, select=[u"Brand",u"Color"], parameters=parameters): print(entity_chosen) except HttpResponseError as e: @@ -105,7 +105,7 @@ async def sample_query_entities_multiple_params(self): } name_filter = u"Name eq @name and Brand eq @brand" queried_entities = table_client.query_entities( - filter=name_filter, select=[u"Brand",u"Color"], parameters=parameters) + query_filter=name_filter, select=[u"Brand",u"Color"], parameters=parameters) async for entity_chosen in queried_entities: print(entity_chosen) @@ -128,7 +128,7 @@ async def sample_query_entities_values(self): } name_filter = u"Value gt @lower and Value lt @upper" queried_entities = table_client.query_entities( - filter=name_filter, select=[u"Value"], parameters=parameters) + query_filter=name_filter, select=[u"Value"], parameters=parameters) async for entity_chosen in queried_entities: print(entity_chosen) diff --git a/sdk/tables/azure-data-tables/samples/async_samples/sample_query_tables_async.py b/sdk/tables/azure-data-tables/samples/async_samples/sample_query_tables_async.py index e41e40ddb260..0e910cfeefa2 100644 --- a/sdk/tables/azure-data-tables/samples/async_samples/sample_query_tables_async.py +++ b/sdk/tables/azure-data-tables/samples/async_samples/sample_query_tables_async.py @@ -61,7 +61,7 @@ async def tables_in_account(self): table_name = "mytableasync1" name_filter = "TableName eq '{}'".format(table_name) print("Queried_tables") - async for table in 
table_service.query_tables(filter=name_filter): + async for table in table_service.query_tables(name_filter): print("\t{}".format(table.table_name)) # [END tsc_query_tables] diff --git a/sdk/tables/azure-data-tables/samples/sample_query_table.py b/sdk/tables/azure-data-tables/samples/sample_query_table.py index e516dc956410..c07cb4d9be3b 100644 --- a/sdk/tables/azure-data-tables/samples/sample_query_table.py +++ b/sdk/tables/azure-data-tables/samples/sample_query_table.py @@ -84,7 +84,7 @@ def sample_query_entities(self): } name_filter = u"Name eq @name" queried_entities = table_client.query_entities( - filter=name_filter, select=[u"Brand",u"Color"], parameters=parameters) + query_filter=name_filter, select=[u"Brand",u"Color"], parameters=parameters) for entity_chosen in queried_entities: print(entity_chosen) @@ -107,7 +107,7 @@ def sample_query_entities_multiple_params(self): } name_filter = u"Name eq @name and Brand eq @brand" queried_entities = table_client.query_entities( - filter=name_filter, select=[u"Brand",u"Color"], parameters=parameters) + query_filter=name_filter, select=[u"Brand",u"Color"], parameters=parameters) for entity_chosen in queried_entities: print(entity_chosen) @@ -131,7 +131,7 @@ def sample_query_entities_values(self): } name_filter = u"Value gt @lower and Value lt @upper" queried_entities = table_client.query_entities( - filter=name_filter, select=[u"Value"], parameters=parameters) + query_filter=name_filter, select=[u"Value"], parameters=parameters) for entity_chosen in queried_entities: print(entity_chosen) diff --git a/sdk/tables/azure-data-tables/samples/sample_query_tables.py b/sdk/tables/azure-data-tables/samples/sample_query_tables.py index eb4409d86f53..5e2a9ce9192c 100644 --- a/sdk/tables/azure-data-tables/samples/sample_query_tables.py +++ b/sdk/tables/azure-data-tables/samples/sample_query_tables.py @@ -61,7 +61,7 @@ def tables_in_account(self): # [START tsc_query_tables] table_name = "mytable1" name_filter = "TableName eq '{}'".format(table_name) - queried_tables = table_service.query_tables(filter=name_filter) + queried_tables = table_service.query_tables(name_filter) print("Queried_tables") for table in queried_tables: diff --git a/sdk/tables/azure-data-tables/setup.py b/sdk/tables/azure-data-tables/setup.py index e36d8076a466..99f6580495f6 100644 --- a/sdk/tables/azure-data-tables/setup.py +++ b/sdk/tables/azure-data-tables/setup.py @@ -78,7 +78,7 @@ 'azure.data', ]), install_requires=[ - "azure-core<2.0.0,>=1.10.0", + "azure-core<2.0.0,>=1.13.0", "msrest>=0.6.19" ], extras_require={ diff --git a/sdk/tables/azure-data-tables/tests/encryption_test_helper.py b/sdk/tables/azure-data-tables/tests/encryption_test_helper.py deleted file mode 100644 index e1e9f5439ec3..000000000000 --- a/sdk/tables/azure-data-tables/tests/encryption_test_helper.py +++ /dev/null @@ -1,90 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -------------------------------------------------------------------------- -# from cryptography.hazmat.backends import default_backend -# from cryptography.hazmat.primitives.asymmetric.padding import ( -# OAEP, -# MGF1, -# ) -# from cryptography.hazmat.primitives.asymmetric.rsa import generate_private_key -# from cryptography.hazmat.primitives.hashes import SHA1 -# from cryptography.hazmat.primitives.keywrap import ( -# aes_key_wrap, -# aes_key_unwrap, -# ) - - -# class KeyWrapper: -# def __init__(self, kid='local:key1'): -# # Must have constant key value for recorded tests, otherwise we could use a random generator. -# self.kek = b'\xbe\xa4\x11K\x9eJ\x07\xdafF\x83\xad+\xadvA C\xe8\xbc\x90\xa4\x11}G\xc3\x0f\xd4\xb4\x19m\x11' -# self.backend = default_backend() -# self.kid = kid -# -# def wrap_key(self, key, algorithm='A256KW'): -# if algorithm == 'A256KW': -# return aes_key_wrap(self.kek, key, self.backend) -# -# raise ValueError(_ERROR_UNKNOWN_KEY_WRAP_ALGORITHM) -# -# def unwrap_key(self, key, algorithm): -# if algorithm == 'A256KW': -# return aes_key_unwrap(self.kek, key, self.backend) -# -# raise ValueError(_ERROR_UNKNOWN_KEY_WRAP_ALGORITHM) -# -# def get_key_wrap_algorithm(self): -# return 'A256KW' -# -# def get_kid(self): -# return self.kid -# -# -# class KeyResolver: -# def __init__(self): -# self.keys = {} -# -# def put_key(self, key): -# self.keys[key.get_kid()] = key -# -# def resolve_key(self, kid): -# return self.keys[kid] -# -# -# class RSAKeyWrapper: -# def __init__(self, kid='local:key2'): -# self.private_key = generate_private_key(public_exponent=65537, -# key_size=2048, -# backend=default_backend()) -# self.public_key = self.private_key.public_key() -# self.kid = kid -# -# def wrap_key(self, key, algorithm='RSA'): -# if algorithm == 'RSA': -# return self.public_key.encrypt(key, -# OAEP( -# mgf=MGF1(algorithm=SHA1()), -# algorithm=SHA1(), -# label=None) -# ) -# -# raise ValueError(_ERROR_UNKNOWN_KEY_WRAP_ALGORITHM) -# -# def unwrap_key(self, key, algorithm): -# if algorithm == 'RSA': -# return self.private_key.decrypt(key, -# OAEP( -# mgf=MGF1(algorithm=SHA1()), -# algorithm=SHA1(), -# label=None) -# ) -# -# raise ValueError(_ERROR_UNKNOWN_KEY_WRAP_ALGORITHM) -# -# def get_key_wrap_algorithm(self): -# return 'RSA' -# -# def get_kid(self): -# return self.kid diff --git a/sdk/tables/azure-data-tables/tests/recordings/test_retry.test_no_retry.yaml b/sdk/tables/azure-data-tables/tests/recordings/test_retry.test_no_retry.yaml new file mode 100644 index 000000000000..03f14b2eed82 --- /dev/null +++ b/sdk/tables/azure-data-tables/tests/recordings/test_retry.test_no_retry.yaml @@ -0,0 +1,133 @@ +interactions: +- request: + body: '{"TableName": "uttable7d860a15"}' + headers: + Accept: + - application/json;odata=minimalmetadata + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '32' + Content-Type: + - application/json;odata=nometadata + DataServiceVersion: + - '3.0' + Date: + - Thu, 15 Apr 2021 17:53:50 GMT + User-Agent: + - azsdk-python-data-tables/12.0.0b7 Python/3.7.4 (Windows-10-10.0.19041-SP0) + x-ms-date: + - Thu, 15 Apr 2021 17:53:50 GMT + x-ms-version: + - '2019-02-02' + method: POST + uri: https://fake_table_account.table.core.windows.net/Tables + response: + body: + string: '{"odata.metadata":"https://fake_table_account.table.core.windows.net/$metadata#Tables/@Element","TableName":"uttable7d860a15"}' + headers: + cache-control: + - no-cache + content-type: + - application/json;odata=minimalmetadata;streaming=true;charset=utf-8 + date: 
+ - Thu, 15 Apr 2021 17:53:50 GMT + location: + - https://fake_table_account.table.core.windows.net/Tables('uttable7d860a15') + server: + - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 + transfer-encoding: + - chunked + x-content-type-options: + - nosniff + x-ms-version: + - '2019-02-02' + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + Date: + - Thu, 15 Apr 2021 17:53:50 GMT + User-Agent: + - azsdk-python-data-tables/12.0.0b7 Python/3.7.4 (Windows-10-10.0.19041-SP0) + x-ms-date: + - Thu, 15 Apr 2021 17:53:50 GMT + x-ms-version: + - '2019-02-02' + method: DELETE + uri: https://fake_table_account.table.core.windows.net/Tables('uttable7d860a15') + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Thu, 15 Apr 2021 17:53:50 GMT + server: + - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 + x-content-type-options: + - nosniff + x-ms-version: + - '2019-02-02' + status: + code: 204 + message: No Content +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + Date: + - Thu, 15 Apr 2021 17:53:50 GMT + User-Agent: + - azsdk-python-data-tables/12.0.0b7 Python/3.7.4 (Windows-10-10.0.19041-SP0) + x-ms-date: + - Thu, 15 Apr 2021 17:53:50 GMT + x-ms-version: + - '2019-02-02' + method: DELETE + uri: https://fake_table_account.table.core.windows.net/Tables('uttable7d860a15') + response: + body: + string: '{"odata.error":{"code":"ResourceNotFound","message":{"lang":"en-US","value":"The + specified resource does not exist.\nRequestId:1044a6a5-8002-004f-3e20-32ab70000000\nTime:2021-04-15T17:53:50.6263170Z"}}}' + headers: + cache-control: + - no-cache + content-type: + - application/json;odata=minimalmetadata;streaming=true;charset=utf-8 + date: + - Thu, 15 Apr 2021 17:53:50 GMT + server: + - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 + transfer-encoding: + - chunked + x-content-type-options: + - nosniff + x-ms-version: + - '2019-02-02' + status: + code: 404 + message: Not Found +version: 1 diff --git a/sdk/tables/azure-data-tables/tests/recordings/test_retry.test_retry_on_server_error.yaml b/sdk/tables/azure-data-tables/tests/recordings/test_retry.test_retry_on_server_error.yaml index 8293a90dcd17..80c50417e8d7 100644 --- a/sdk/tables/azure-data-tables/tests/recordings/test_retry.test_retry_on_server_error.yaml +++ b/sdk/tables/azure-data-tables/tests/recordings/test_retry.test_retry_on_server_error.yaml @@ -15,11 +15,11 @@ interactions: DataServiceVersion: - '3.0' Date: - - Wed, 07 Apr 2021 22:32:50 GMT + - Thu, 15 Apr 2021 17:53:50 GMT User-Agent: - - azsdk-python-data-tables/12.0.0b7 Python/3.9.0rc1 (Windows-10-10.0.19041-SP0) + - azsdk-python-data-tables/12.0.0b7 Python/3.7.4 (Windows-10-10.0.19041-SP0) x-ms-date: - - Wed, 07 Apr 2021 22:32:50 GMT + - Thu, 15 Apr 2021 17:53:50 GMT x-ms-version: - '2019-02-02' method: POST @@ -33,7 +33,7 @@ interactions: content-type: - application/json;odata=minimalmetadata;streaming=true;charset=utf-8 date: - - Wed, 07 Apr 2021 22:32:51 GMT + - Thu, 15 Apr 2021 17:53:50 GMT location: - https://fake_table_account.table.core.windows.net/Tables('uttable270d0f94') server: @@ -63,11 +63,11 @@ interactions: DataServiceVersion: - '3.0' Date: - - Wed, 07 Apr 2021 22:32:50 GMT + - Thu, 15 Apr 2021 17:53:50 GMT User-Agent: - - azsdk-python-data-tables/12.0.0b7 
Python/3.9.0rc1 (Windows-10-10.0.19041-SP0) + - azsdk-python-data-tables/12.0.0b7 Python/3.7.4 (Windows-10-10.0.19041-SP0) x-ms-date: - - Wed, 07 Apr 2021 22:32:50 GMT + - Thu, 15 Apr 2021 17:53:50 GMT x-ms-version: - '2019-02-02' method: POST @@ -75,14 +75,14 @@ interactions: response: body: string: '{"odata.error":{"code":"TableAlreadyExists","message":{"lang":"en-US","value":"The - table specified already exists.\nRequestId:0451134d-a002-0077-1dfd-2b0fb0000000\nTime:2021-04-07T22:33:07.4938348Z"}}}' + table specified already exists.\nRequestId:320a0268-0002-0041-5a20-3282c0000000\nTime:2021-04-15T17:53:50.7966768Z"}}}' headers: cache-control: - no-cache content-type: - application/json;odata=minimalmetadata;streaming=true;charset=utf-8 date: - - Wed, 07 Apr 2021 22:33:07 GMT + - Thu, 15 Apr 2021 17:53:50 GMT server: - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: @@ -106,11 +106,11 @@ interactions: Content-Length: - '0' Date: - - Wed, 07 Apr 2021 22:33:07 GMT + - Thu, 15 Apr 2021 17:53:50 GMT User-Agent: - - azsdk-python-data-tables/12.0.0b7 Python/3.9.0rc1 (Windows-10-10.0.19041-SP0) + - azsdk-python-data-tables/12.0.0b7 Python/3.7.4 (Windows-10-10.0.19041-SP0) x-ms-date: - - Wed, 07 Apr 2021 22:33:07 GMT + - Thu, 15 Apr 2021 17:53:50 GMT x-ms-version: - '2019-02-02' method: DELETE @@ -124,7 +124,7 @@ interactions: content-length: - '0' date: - - Wed, 07 Apr 2021 22:33:07 GMT + - Thu, 15 Apr 2021 17:53:50 GMT server: - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 x-content-type-options: @@ -146,11 +146,11 @@ interactions: Content-Length: - '0' Date: - - Wed, 07 Apr 2021 22:33:07 GMT + - Thu, 15 Apr 2021 17:53:50 GMT User-Agent: - - azsdk-python-data-tables/12.0.0b7 Python/3.9.0rc1 (Windows-10-10.0.19041-SP0) + - azsdk-python-data-tables/12.0.0b7 Python/3.7.4 (Windows-10-10.0.19041-SP0) x-ms-date: - - Wed, 07 Apr 2021 22:33:07 GMT + - Thu, 15 Apr 2021 17:53:50 GMT x-ms-version: - '2019-02-02' method: DELETE @@ -158,14 +158,14 @@ interactions: response: body: string: '{"odata.error":{"code":"ResourceNotFound","message":{"lang":"en-US","value":"The - specified resource does not exist.\nRequestId:0451138c-a002-0077-5afd-2b0fb0000000\nTime:2021-04-07T22:33:07.7860414Z"}}}' + specified resource does not exist.\nRequestId:320a027b-0002-0041-6c20-3282c0000000\nTime:2021-04-15T17:53:50.8757336Z"}}}' headers: cache-control: - no-cache content-type: - application/json;odata=minimalmetadata;streaming=true;charset=utf-8 date: - - Wed, 07 Apr 2021 22:33:07 GMT + - Thu, 15 Apr 2021 17:53:50 GMT server: - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: diff --git a/sdk/tables/azure-data-tables/tests/recordings/test_retry.test_retry_on_socket_timeout.yaml b/sdk/tables/azure-data-tables/tests/recordings/test_retry.test_retry_on_socket_timeout.yaml deleted file mode 100644 index 2b84cc03df6c..000000000000 --- a/sdk/tables/azure-data-tables/tests/recordings/test_retry.test_retry_on_socket_timeout.yaml +++ /dev/null @@ -1,85 +0,0 @@ -interactions: -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '0' - Date: - - Mon, 05 Apr 2021 20:53:34 GMT - User-Agent: - - azsdk-python-data-tables/12.0.0b6 Python/3.7.4 (Windows-10-10.0.19041-SP0) - x-ms-date: - - Mon, 05 Apr 2021 20:53:34 GMT - x-ms-version: - - '2019-02-02' - method: DELETE - uri: https://fake_table_account.table.core.windows.net/Tables('uttable46e31063') - response: - body: - string: '' - headers: - 
cache-control: - - no-cache - content-length: - - '0' - date: - - Mon, 05 Apr 2021 20:53:34 GMT - server: - - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 - x-content-type-options: - - nosniff - x-ms-version: - - '2019-02-02' - status: - code: 204 - message: No Content -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '0' - Date: - - Mon, 05 Apr 2021 20:53:34 GMT - User-Agent: - - azsdk-python-data-tables/12.0.0b6 Python/3.7.4 (Windows-10-10.0.19041-SP0) - x-ms-date: - - Mon, 05 Apr 2021 20:53:34 GMT - x-ms-version: - - '2019-02-02' - method: DELETE - uri: https://fake_table_account.table.core.windows.net/Tables('uttable46e31063') - response: - body: - string: '{"odata.error":{"code":"ResourceNotFound","message":{"lang":"en-US","value":"The - specified resource does not exist.\nRequestId:05291b19-e002-0004-345d-2a5723000000\nTime:2021-04-05T20:53:34.4934432Z"}}}' - headers: - cache-control: - - no-cache - content-type: - - application/json;odata=minimalmetadata;streaming=true;charset=utf-8 - date: - - Mon, 05 Apr 2021 20:53:34 GMT - server: - - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 - transfer-encoding: - - chunked - x-content-type-options: - - nosniff - x-ms-version: - - '2019-02-02' - status: - code: 404 - message: Not Found -version: 1 diff --git a/sdk/tables/azure-data-tables/tests/recordings/test_retry.test_retry_on_timeout.yaml b/sdk/tables/azure-data-tables/tests/recordings/test_retry.test_retry_on_timeout.yaml index 65e4b858750d..dc72f12e344f 100644 --- a/sdk/tables/azure-data-tables/tests/recordings/test_retry.test_retry_on_timeout.yaml +++ b/sdk/tables/azure-data-tables/tests/recordings/test_retry.test_retry_on_timeout.yaml @@ -1,139 +1,78 @@ interactions: - request: - body: '{"TableName": "uttabledd800d7b"}' + body: null headers: Accept: - - application/json;odata=minimalmetadata + - application/xml Accept-Encoding: - gzip, deflate Connection: - keep-alive - Content-Length: - - '32' - Content-Type: - - application/json;odata=nometadata - DataServiceVersion: - - '3.0' Date: - - Mon, 05 Apr 2021 20:53:34 GMT + - Thu, 15 Apr 2021 17:53:53 GMT User-Agent: - - azsdk-python-data-tables/12.0.0b6 Python/3.7.4 (Windows-10-10.0.19041-SP0) + - azsdk-python-data-tables/12.0.0b7 Python/3.7.4 (Windows-10-10.0.19041-SP0) x-ms-date: - - Mon, 05 Apr 2021 20:53:34 GMT + - Thu, 15 Apr 2021 17:53:53 GMT x-ms-version: - '2019-02-02' - method: POST - uri: https://fake_table_account.table.core.windows.net/Tables + method: GET + uri: https://fake_table_account.table.core.windows.net/?restype=service&comp=properties response: body: - string: '{"odata.metadata":"https://fake_table_account.table.core.windows.net/$metadata#Tables/@Element","TableName":"uttabledd800d7b"}' + string: "\uFEFF1.0falsefalsefalsefalse1.0falsefalse1.0falsefalse" headers: - cache-control: - - no-cache content-type: - - application/json;odata=minimalmetadata;streaming=true;charset=utf-8 + - application/xml date: - - Mon, 05 Apr 2021 20:53:34 GMT - location: - - https://fake_table_account.table.core.windows.net/Tables('uttabledd800d7b') + - Thu, 15 Apr 2021 17:53:53 GMT server: - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked - x-content-type-options: - - nosniff x-ms-version: - '2019-02-02' status: - code: 201 - message: Created + code: 200 + message: OK - request: - body: '{"TableName": "uttabledd800d7b"}' + body: null headers: Accept: - - application/json;odata=minimalmetadata + - 
application/xml Accept-Encoding: - gzip, deflate Connection: - keep-alive - Content-Length: - - '32' - Content-Type: - - application/json;odata=nometadata - DataServiceVersion: - - '3.0' Date: - - Mon, 05 Apr 2021 20:53:34 GMT + - Thu, 15 Apr 2021 17:53:53 GMT User-Agent: - - azsdk-python-data-tables/12.0.0b6 Python/3.7.4 (Windows-10-10.0.19041-SP0) + - azsdk-python-data-tables/12.0.0b7 Python/3.7.4 (Windows-10-10.0.19041-SP0) x-ms-date: - - Mon, 05 Apr 2021 20:53:34 GMT + - Thu, 15 Apr 2021 17:53:53 GMT x-ms-version: - '2019-02-02' - method: POST - uri: https://fake_table_account.table.core.windows.net/Tables + method: GET + uri: https://fake_table_account.table.core.windows.net/?restype=service&comp=properties response: body: - string: '{"odata.error":{"code":"TableAlreadyExists","message":{"lang":"en-US","value":"The - table specified already exists.\nRequestId:f2519219-5002-0011-145d-2a4090000000\nTime:2021-04-05T20:53:52.6677367Z"}}}' + string: "\uFEFF1.0falsefalsefalsefalse1.0falsefalse1.0falsefalse" headers: - cache-control: - - no-cache content-type: - - application/json;odata=minimalmetadata;streaming=true;charset=utf-8 + - application/xml date: - - Mon, 05 Apr 2021 20:53:51 GMT + - Thu, 15 Apr 2021 17:53:53 GMT server: - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: - chunked - x-content-type-options: - - nosniff - x-ms-version: - - '2019-02-02' - status: - code: 409 - message: Conflict -- request: - body: null - headers: - Accept: - - application/json - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '0' - Date: - - Mon, 05 Apr 2021 20:53:52 GMT - User-Agent: - - azsdk-python-data-tables/12.0.0b6 Python/3.7.4 (Windows-10-10.0.19041-SP0) - x-ms-date: - - Mon, 05 Apr 2021 20:53:52 GMT - x-ms-version: - - '2019-02-02' - method: DELETE - uri: https://fake_table_account.table.core.windows.net/Tables('uttabledd800d7b') - response: - body: - string: '' - headers: - cache-control: - - no-cache - content-length: - - '0' - date: - - Mon, 05 Apr 2021 20:53:52 GMT - server: - - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 - x-content-type-options: - - nosniff x-ms-version: - '2019-02-02' status: - code: 204 - message: No Content + code: 200 + message: OK - request: body: null headers: @@ -146,11 +85,11 @@ interactions: Content-Length: - '0' Date: - - Mon, 05 Apr 2021 20:53:52 GMT + - Thu, 15 Apr 2021 17:53:53 GMT User-Agent: - - azsdk-python-data-tables/12.0.0b6 Python/3.7.4 (Windows-10-10.0.19041-SP0) + - azsdk-python-data-tables/12.0.0b7 Python/3.7.4 (Windows-10-10.0.19041-SP0) x-ms-date: - - Mon, 05 Apr 2021 20:53:52 GMT + - Thu, 15 Apr 2021 17:53:53 GMT x-ms-version: - '2019-02-02' method: DELETE @@ -158,14 +97,14 @@ interactions: response: body: string: '{"odata.error":{"code":"ResourceNotFound","message":{"lang":"en-US","value":"The - specified resource does not exist.\nRequestId:f2519222-5002-0011-1c5d-2a4090000000\nTime:2021-04-05T20:53:52.8198466Z"}}}' + specified resource does not exist.\nRequestId:b1431998-9002-000e-3520-32f394000000\nTime:2021-04-15T17:53:53.7595042Z"}}}' headers: cache-control: - no-cache content-type: - application/json;odata=minimalmetadata;streaming=true;charset=utf-8 date: - - Mon, 05 Apr 2021 20:53:52 GMT + - Thu, 15 Apr 2021 17:53:53 GMT server: - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: diff --git a/sdk/tables/azure-data-tables/tests/recordings/test_retry_async.test_no_retry_async.yaml b/sdk/tables/azure-data-tables/tests/recordings/test_retry_async.test_no_retry_async.yaml 
new file mode 100644 index 000000000000..a29e3481d59d --- /dev/null +++ b/sdk/tables/azure-data-tables/tests/recordings/test_retry_async.test_no_retry_async.yaml @@ -0,0 +1,99 @@ +interactions: +- request: + body: '{"TableName": "uttable17050f0f"}' + headers: + Accept: + - application/json;odata=minimalmetadata + Content-Length: + - '32' + Content-Type: + - application/json;odata=nometadata + DataServiceVersion: + - '3.0' + Date: + - Thu, 15 Apr 2021 17:53:53 GMT + User-Agent: + - azsdk-python-data-tables/12.0.0b7 Python/3.7.4 (Windows-10-10.0.19041-SP0) + x-ms-date: + - Thu, 15 Apr 2021 17:53:53 GMT + x-ms-version: + - '2019-02-02' + method: POST + uri: https://fake_table_account.table.core.windows.net/Tables + response: + body: + string: '{"odata.metadata":"https://fake_table_account.table.core.windows.net/$metadata#Tables/@Element","TableName":"uttable17050f0f"}' + headers: + cache-control: no-cache + content-type: application/json;odata=minimalmetadata;streaming=true;charset=utf-8 + date: Thu, 15 Apr 2021 17:53:53 GMT + location: https://fake_table_account.table.core.windows.net/Tables('uttable17050f0f') + server: Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 + transfer-encoding: chunked + x-content-type-options: nosniff + x-ms-version: '2019-02-02' + status: + code: 201 + message: Created + url: https://seankaneprim.table.core.windows.net/Tables +- request: + body: null + headers: + Accept: + - application/json + Date: + - Thu, 15 Apr 2021 17:53:53 GMT + User-Agent: + - azsdk-python-data-tables/12.0.0b7 Python/3.7.4 (Windows-10-10.0.19041-SP0) + x-ms-date: + - Thu, 15 Apr 2021 17:53:53 GMT + x-ms-version: + - '2019-02-02' + method: DELETE + uri: https://fake_table_account.table.core.windows.net/Tables('uttable17050f0f') + response: + body: + string: '' + headers: + cache-control: no-cache + content-length: '0' + date: Thu, 15 Apr 2021 17:53:53 GMT + server: Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 + x-content-type-options: nosniff + x-ms-version: '2019-02-02' + status: + code: 204 + message: No Content + url: https://seankaneprim.table.core.windows.net/Tables('uttable17050f0f') +- request: + body: null + headers: + Accept: + - application/json + Date: + - Thu, 15 Apr 2021 17:53:53 GMT + User-Agent: + - azsdk-python-data-tables/12.0.0b7 Python/3.7.4 (Windows-10-10.0.19041-SP0) + x-ms-date: + - Thu, 15 Apr 2021 17:53:53 GMT + x-ms-version: + - '2019-02-02' + method: DELETE + uri: https://fake_table_account.table.core.windows.net/Tables('uttable17050f0f') + response: + body: + string: '{"odata.error":{"code":"ResourceNotFound","message":{"lang":"en-US","value":"The + specified resource does not exist.\nRequestId:a08df4d6-3002-005a-7220-32bcc3000000\nTime:2021-04-15T17:53:53.9100295Z"}}}' + headers: + cache-control: no-cache + content-type: application/json;odata=minimalmetadata;streaming=true;charset=utf-8 + date: Thu, 15 Apr 2021 17:53:53 GMT + server: Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 + transfer-encoding: chunked + x-content-type-options: nosniff + x-ms-version: '2019-02-02' + status: + code: 404 + message: Not Found + url: https://seankaneprim.table.core.windows.net/Tables('uttable17050f0f') +version: 1 diff --git a/sdk/tables/azure-data-tables/tests/recordings/test_retry_async.test_retry_on_server_error_async.yaml b/sdk/tables/azure-data-tables/tests/recordings/test_retry_async.test_retry_on_server_error_async.yaml index 96f9ff6069f3..d8b9629db3b8 100644 --- a/sdk/tables/azure-data-tables/tests/recordings/test_retry_async.test_retry_on_server_error_async.yaml +++ 
b/sdk/tables/azure-data-tables/tests/recordings/test_retry_async.test_retry_on_server_error_async.yaml @@ -11,11 +11,11 @@ interactions: DataServiceVersion: - '3.0' Date: - - Mon, 05 Apr 2021 21:51:29 GMT + - Thu, 15 Apr 2021 17:53:53 GMT User-Agent: - - azsdk-python-data-tables/12.0.0b6 Python/3.7.4 (Windows-10-10.0.19041-SP0) + - azsdk-python-data-tables/12.0.0b7 Python/3.7.4 (Windows-10-10.0.19041-SP0) x-ms-date: - - Mon, 05 Apr 2021 21:51:29 GMT + - Thu, 15 Apr 2021 17:53:53 GMT x-ms-version: - '2019-02-02' method: POST @@ -26,7 +26,7 @@ interactions: headers: cache-control: no-cache content-type: application/json;odata=minimalmetadata;streaming=true;charset=utf-8 - date: Mon, 05 Apr 2021 21:51:28 GMT + date: Thu, 15 Apr 2021 17:53:53 GMT location: https://fake_table_account.table.core.windows.net/Tables('uttable1df148e') server: Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: chunked @@ -48,11 +48,11 @@ interactions: DataServiceVersion: - '3.0' Date: - - Mon, 05 Apr 2021 21:51:29 GMT + - Thu, 15 Apr 2021 17:53:53 GMT User-Agent: - - azsdk-python-data-tables/12.0.0b6 Python/3.7.4 (Windows-10-10.0.19041-SP0) + - azsdk-python-data-tables/12.0.0b7 Python/3.7.4 (Windows-10-10.0.19041-SP0) x-ms-date: - - Mon, 05 Apr 2021 21:51:29 GMT + - Thu, 15 Apr 2021 17:53:53 GMT x-ms-version: - '2019-02-02' method: POST @@ -60,11 +60,11 @@ interactions: response: body: string: '{"odata.error":{"code":"TableAlreadyExists","message":{"lang":"en-US","value":"The - table specified already exists.\nRequestId:33e9accd-6002-00a3-4c65-2abfe1000000\nTime:2021-04-05T21:51:48.9118439Z"}}}' + table specified already exists.\nRequestId:7a3ddafb-b002-0009-1a20-329ff7000000\nTime:2021-04-15T17:53:54.0094563Z"}}}' headers: cache-control: no-cache content-type: application/json;odata=minimalmetadata;streaming=true;charset=utf-8 - date: Mon, 05 Apr 2021 21:51:48 GMT + date: Thu, 15 Apr 2021 17:53:53 GMT server: Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: chunked x-content-type-options: nosniff @@ -79,11 +79,11 @@ interactions: Accept: - application/json Date: - - Mon, 05 Apr 2021 21:51:48 GMT + - Thu, 15 Apr 2021 17:53:54 GMT User-Agent: - - azsdk-python-data-tables/12.0.0b6 Python/3.7.4 (Windows-10-10.0.19041-SP0) + - azsdk-python-data-tables/12.0.0b7 Python/3.7.4 (Windows-10-10.0.19041-SP0) x-ms-date: - - Mon, 05 Apr 2021 21:51:48 GMT + - Thu, 15 Apr 2021 17:53:54 GMT x-ms-version: - '2019-02-02' method: DELETE @@ -94,7 +94,7 @@ interactions: headers: cache-control: no-cache content-length: '0' - date: Mon, 05 Apr 2021 21:51:48 GMT + date: Thu, 15 Apr 2021 17:53:53 GMT server: Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 x-content-type-options: nosniff x-ms-version: '2019-02-02' @@ -108,11 +108,11 @@ interactions: Accept: - application/json Date: - - Mon, 05 Apr 2021 21:51:48 GMT + - Thu, 15 Apr 2021 17:53:54 GMT User-Agent: - - azsdk-python-data-tables/12.0.0b6 Python/3.7.4 (Windows-10-10.0.19041-SP0) + - azsdk-python-data-tables/12.0.0b7 Python/3.7.4 (Windows-10-10.0.19041-SP0) x-ms-date: - - Mon, 05 Apr 2021 21:51:48 GMT + - Thu, 15 Apr 2021 17:53:54 GMT x-ms-version: - '2019-02-02' method: DELETE @@ -120,11 +120,11 @@ interactions: response: body: string: '{"odata.error":{"code":"ResourceNotFound","message":{"lang":"en-US","value":"The - specified resource does not exist.\nRequestId:33e9acd2-6002-00a3-4f65-2abfe1000000\nTime:2021-04-05T21:51:48.9518715Z"}}}' + specified resource does not 
exist.\nRequestId:7a3ddaff-b002-0009-1e20-329ff7000000\nTime:2021-04-15T17:53:54.0504843Z"}}}' headers: cache-control: no-cache content-type: application/json;odata=minimalmetadata;streaming=true;charset=utf-8 - date: Mon, 05 Apr 2021 21:51:48 GMT + date: Thu, 15 Apr 2021 17:53:53 GMT server: Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: chunked x-content-type-options: nosniff diff --git a/sdk/tables/azure-data-tables/tests/recordings/test_retry_async.test_retry_on_socket_timeout_async.yaml b/sdk/tables/azure-data-tables/tests/recordings/test_retry_async.test_retry_on_socket_timeout_async.yaml deleted file mode 100644 index f1474e254cc1..000000000000 --- a/sdk/tables/azure-data-tables/tests/recordings/test_retry_async.test_retry_on_socket_timeout_async.yaml +++ /dev/null @@ -1,62 +0,0 @@ -interactions: -- request: - body: null - headers: - Accept: - - application/json - Date: - - Mon, 05 Apr 2021 21:53:16 GMT - User-Agent: - - azsdk-python-data-tables/12.0.0b6 Python/3.7.4 (Windows-10-10.0.19041-SP0) - x-ms-date: - - Mon, 05 Apr 2021 21:53:16 GMT - x-ms-version: - - '2019-02-02' - method: DELETE - uri: https://fake_table_account.table.core.windows.net/Tables('uttable2b89155d') - response: - body: - string: '' - headers: - cache-control: no-cache - content-length: '0' - date: Mon, 05 Apr 2021 21:53:16 GMT - server: Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 - x-content-type-options: nosniff - x-ms-version: '2019-02-02' - status: - code: 204 - message: No Content - url: https://seankaneprim.table.core.windows.net/Tables('uttable2b89155d') -- request: - body: null - headers: - Accept: - - application/json - Date: - - Mon, 05 Apr 2021 21:53:16 GMT - User-Agent: - - azsdk-python-data-tables/12.0.0b6 Python/3.7.4 (Windows-10-10.0.19041-SP0) - x-ms-date: - - Mon, 05 Apr 2021 21:53:16 GMT - x-ms-version: - - '2019-02-02' - method: DELETE - uri: https://fake_table_account.table.core.windows.net/Tables('uttable2b89155d') - response: - body: - string: '{"odata.error":{"code":"ResourceNotFound","message":{"lang":"en-US","value":"The - specified resource does not exist.\nRequestId:c5dc9c39-d002-00a6-5e66-2a6d3a000000\nTime:2021-04-05T21:53:16.8613108Z"}}}' - headers: - cache-control: no-cache - content-type: application/json;odata=minimalmetadata;streaming=true;charset=utf-8 - date: Mon, 05 Apr 2021 21:53:16 GMT - server: Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 - transfer-encoding: chunked - x-content-type-options: nosniff - x-ms-version: '2019-02-02' - status: - code: 404 - message: Not Found - url: https://seankaneprim.table.core.windows.net/Tables('uttable2b89155d') -version: 1 diff --git a/sdk/tables/azure-data-tables/tests/recordings/test_retry_async.test_retry_on_timeout_async.yaml b/sdk/tables/azure-data-tables/tests/recordings/test_retry_async.test_retry_on_timeout_async.yaml index 1642a42cbaf0..67937156a178 100644 --- a/sdk/tables/azure-data-tables/tests/recordings/test_retry_async.test_retry_on_timeout_async.yaml +++ b/sdk/tables/azure-data-tables/tests/recordings/test_retry_async.test_retry_on_timeout_async.yaml @@ -1,118 +1,73 @@ interactions: - request: - body: '{"TableName": "uttable9f4b1275"}' - headers: - Accept: - - application/json;odata=minimalmetadata - Content-Length: - - '32' - Content-Type: - - application/json;odata=nometadata - DataServiceVersion: - - '3.0' - Date: - - Mon, 05 Apr 2021 21:53:16 GMT - User-Agent: - - azsdk-python-data-tables/12.0.0b6 Python/3.7.4 (Windows-10-10.0.19041-SP0) - x-ms-date: - - Mon, 05 Apr 2021 21:53:16 GMT - 
x-ms-version: - - '2019-02-02' - method: POST - uri: https://fake_table_account.table.core.windows.net/Tables - response: - body: - string: '{"odata.metadata":"https://fake_table_account.table.core.windows.net/$metadata#Tables/@Element","TableName":"uttable9f4b1275"}' - headers: - cache-control: no-cache - content-type: application/json;odata=minimalmetadata;streaming=true;charset=utf-8 - date: Mon, 05 Apr 2021 21:53:16 GMT - location: https://fake_table_account.table.core.windows.net/Tables('uttable9f4b1275') - server: Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 - transfer-encoding: chunked - x-content-type-options: nosniff - x-ms-version: '2019-02-02' - status: - code: 201 - message: Created - url: https://seankaneprim.table.core.windows.net/Tables -- request: - body: '{"TableName": "uttable9f4b1275"}' + body: null headers: Accept: - - application/json;odata=minimalmetadata - Content-Length: - - '32' - Content-Type: - - application/json;odata=nometadata - DataServiceVersion: - - '3.0' + - application/xml Date: - - Mon, 05 Apr 2021 21:53:16 GMT + - Thu, 15 Apr 2021 17:53:58 GMT User-Agent: - - azsdk-python-data-tables/12.0.0b6 Python/3.7.4 (Windows-10-10.0.19041-SP0) + - azsdk-python-data-tables/12.0.0b7 Python/3.7.4 (Windows-10-10.0.19041-SP0) x-ms-date: - - Mon, 05 Apr 2021 21:53:16 GMT + - Thu, 15 Apr 2021 17:53:58 GMT x-ms-version: - '2019-02-02' - method: POST - uri: https://fake_table_account.table.core.windows.net/Tables + method: GET + uri: https://fake_table_account.table.core.windows.net/?restype=service&comp=properties response: body: - string: '{"odata.error":{"code":"TableAlreadyExists","message":{"lang":"en-US","value":"The - table specified already exists.\nRequestId:27a14ef8-3002-0007-6766-2ab647000000\nTime:2021-04-05T21:53:37.7044890Z"}}}' + string: "\uFEFF1.0falsefalsefalsefalse1.0falsefalse1.0falsefalse" headers: - cache-control: no-cache - content-type: application/json;odata=minimalmetadata;streaming=true;charset=utf-8 - date: Mon, 05 Apr 2021 21:53:37 GMT + content-type: application/xml + date: Thu, 15 Apr 2021 17:53:58 GMT server: Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: chunked - x-content-type-options: nosniff x-ms-version: '2019-02-02' status: - code: 409 - message: Conflict - url: https://seankaneprim.table.core.windows.net/Tables + code: 200 + message: OK + url: https://seankaneprim.table.core.windows.net/?restype=service&comp=properties - request: body: null headers: Accept: - - application/json + - application/xml Date: - - Mon, 05 Apr 2021 21:53:37 GMT + - Thu, 15 Apr 2021 17:53:58 GMT User-Agent: - - azsdk-python-data-tables/12.0.0b6 Python/3.7.4 (Windows-10-10.0.19041-SP0) + - azsdk-python-data-tables/12.0.0b7 Python/3.7.4 (Windows-10-10.0.19041-SP0) x-ms-date: - - Mon, 05 Apr 2021 21:53:37 GMT + - Thu, 15 Apr 2021 17:53:58 GMT x-ms-version: - '2019-02-02' - method: DELETE - uri: https://fake_table_account.table.core.windows.net/Tables('uttable9f4b1275') + method: GET + uri: https://fake_table_account.table.core.windows.net/?restype=service&comp=properties response: body: - string: '' + string: "\uFEFF1.0falsefalsefalsefalse1.0falsefalse1.0falsefalse" headers: - cache-control: no-cache - content-length: '0' - date: Mon, 05 Apr 2021 21:53:37 GMT + content-type: application/xml + date: Thu, 15 Apr 2021 17:53:58 GMT server: Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 - x-content-type-options: nosniff + transfer-encoding: chunked x-ms-version: '2019-02-02' status: - code: 204 - message: No Content - url: 
https://seankaneprim.table.core.windows.net/Tables('uttable9f4b1275') + code: 200 + message: OK + url: https://seankaneprim.table.core.windows.net/?restype=service&comp=properties - request: body: null headers: Accept: - application/json Date: - - Mon, 05 Apr 2021 21:53:37 GMT + - Thu, 15 Apr 2021 17:53:58 GMT User-Agent: - - azsdk-python-data-tables/12.0.0b6 Python/3.7.4 (Windows-10-10.0.19041-SP0) + - azsdk-python-data-tables/12.0.0b7 Python/3.7.4 (Windows-10-10.0.19041-SP0) x-ms-date: - - Mon, 05 Apr 2021 21:53:37 GMT + - Thu, 15 Apr 2021 17:53:58 GMT x-ms-version: - '2019-02-02' method: DELETE @@ -120,11 +75,11 @@ interactions: response: body: string: '{"odata.error":{"code":"ResourceNotFound","message":{"lang":"en-US","value":"The - specified resource does not exist.\nRequestId:27a14f00-3002-0007-6d66-2ab647000000\nTime:2021-04-05T21:53:37.7625305Z"}}}' + specified resource does not exist.\nRequestId:070c53df-6002-00a3-2d20-32bfe1000000\nTime:2021-04-15T17:53:58.5502708Z"}}}' headers: cache-control: no-cache content-type: application/json;odata=minimalmetadata;streaming=true;charset=utf-8 - date: Mon, 05 Apr 2021 21:53:37 GMT + date: Thu, 15 Apr 2021 17:53:58 GMT server: Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: chunked x-content-type-options: nosniff diff --git a/sdk/tables/azure-data-tables/tests/recordings/test_table.test_set_table_acl_too_many_ids.yaml b/sdk/tables/azure-data-tables/tests/recordings/test_table.test_set_table_acl_too_many_ids.yaml index b421fc2200f5..ac2c18bc5bc4 100644 --- a/sdk/tables/azure-data-tables/tests/recordings/test_table.test_set_table_acl_too_many_ids.yaml +++ b/sdk/tables/azure-data-tables/tests/recordings/test_table.test_set_table_acl_too_many_ids.yaml @@ -15,11 +15,11 @@ interactions: DataServiceVersion: - '3.0' Date: - - Fri, 18 Dec 2020 17:29:43 GMT + - Wed, 14 Apr 2021 18:10:59 GMT User-Agent: - - azsdk-python-data-tables/12.0.0b4 Python/3.9.0rc1 (Windows-10-10.0.19041-SP0) + - azsdk-python-data-tables/12.0.0b7 Python/3.7.4 (Windows-10-10.0.19041-SP0) x-ms-date: - - Fri, 18 Dec 2020 17:29:43 GMT + - Wed, 14 Apr 2021 18:10:59 GMT x-ms-version: - '2019-02-02' method: POST @@ -33,7 +33,7 @@ interactions: content-type: - application/json;odata=minimalmetadata;streaming=true;charset=utf-8 date: - - Fri, 18 Dec 2020 17:29:43 GMT + - Wed, 14 Apr 2021 18:10:59 GMT location: - https://fake_table_account.table.core.windows.net/Tables('pytablesync6f17111b') server: @@ -47,6 +47,55 @@ interactions: status: code: 201 message: Created +- request: + body: ' + + id0id1id2id3id4id5' + headers: + Accept: + - application/xml + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '372' + Content-Type: + - application/xml + Date: + - Wed, 14 Apr 2021 18:11:00 GMT + User-Agent: + - azsdk-python-data-tables/12.0.0b7 Python/3.7.4 (Windows-10-10.0.19041-SP0) + x-ms-date: + - Wed, 14 Apr 2021 18:11:00 GMT + x-ms-version: + - '2019-02-02' + method: PUT + uri: https://fake_table_account.table.core.windows.net/pytablesync6f17111b?comp=acl + response: + body: + string: 'InvalidXmlDocumentXML specified is not syntactically valid. 
+ + RequestId:e4afd8b3-1002-0010-1959-311f4c000000 + + Time:2021-04-14T18:11:00.9932670Z' + headers: + content-length: + - '327' + content-type: + - application/xml + date: + - Wed, 14 Apr 2021 18:11:00 GMT + server: + - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 + x-ms-error-code: + - InvalidXmlDocument + x-ms-version: + - '2019-02-02' + status: + code: 400 + message: XML specified is not syntactically valid. - request: body: null headers: @@ -59,11 +108,11 @@ interactions: Content-Length: - '0' Date: - - Fri, 18 Dec 2020 17:29:43 GMT + - Wed, 14 Apr 2021 18:11:01 GMT User-Agent: - - azsdk-python-data-tables/12.0.0b4 Python/3.9.0rc1 (Windows-10-10.0.19041-SP0) + - azsdk-python-data-tables/12.0.0b7 Python/3.7.4 (Windows-10-10.0.19041-SP0) x-ms-date: - - Fri, 18 Dec 2020 17:29:43 GMT + - Wed, 14 Apr 2021 18:11:01 GMT x-ms-version: - '2019-02-02' method: DELETE @@ -77,7 +126,7 @@ interactions: content-length: - '0' date: - - Fri, 18 Dec 2020 17:29:43 GMT + - Wed, 14 Apr 2021 18:11:00 GMT server: - Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 x-content-type-options: diff --git a/sdk/tables/azure-data-tables/tests/recordings/test_table_async.test_set_table_acl_too_many_ids.yaml b/sdk/tables/azure-data-tables/tests/recordings/test_table_async.test_set_table_acl_too_many_ids.yaml index a47bad327d7a..a9c6722a7034 100644 --- a/sdk/tables/azure-data-tables/tests/recordings/test_table_async.test_set_table_acl_too_many_ids.yaml +++ b/sdk/tables/azure-data-tables/tests/recordings/test_table_async.test_set_table_acl_too_many_ids.yaml @@ -11,11 +11,11 @@ interactions: DataServiceVersion: - '3.0' Date: - - Fri, 18 Dec 2020 17:29:57 GMT + - Wed, 14 Apr 2021 18:11:01 GMT User-Agent: - - azsdk-python-data-tables/12.0.0b4 Python/3.9.0rc1 (Windows-10-10.0.19041-SP0) + - azsdk-python-data-tables/12.0.0b7 Python/3.7.4 (Windows-10-10.0.19041-SP0) x-ms-date: - - Fri, 18 Dec 2020 17:29:57 GMT + - Wed, 14 Apr 2021 18:11:01 GMT x-ms-version: - '2019-02-02' method: POST @@ -26,7 +26,7 @@ interactions: headers: cache-control: no-cache content-type: application/json;odata=minimalmetadata;streaming=true;charset=utf-8 - date: Fri, 18 Dec 2020 17:29:58 GMT + date: Wed, 14 Apr 2021 18:11:01 GMT location: https://fake_table_account.table.core.windows.net/Tables('pytableasynce03c1398') server: Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 transfer-encoding: chunked @@ -36,17 +36,57 @@ interactions: code: 201 message: Created url: https://seankaneprim.table.core.windows.net/Tables +- request: + body: ' + + id0id1id2id3id4id5' + headers: + Accept: + - application/xml + Content-Length: + - '372' + Content-Type: + - application/xml + Date: + - Wed, 14 Apr 2021 18:11:01 GMT + User-Agent: + - azsdk-python-data-tables/12.0.0b7 Python/3.7.4 (Windows-10-10.0.19041-SP0) + x-ms-date: + - Wed, 14 Apr 2021 18:11:01 GMT + x-ms-version: + - '2019-02-02' + method: PUT + uri: https://fake_table_account.table.core.windows.net/pytableasynce03c1398?comp=acl + response: + body: + string: 'InvalidXmlDocumentXML specified is not syntactically valid. + + RequestId:a8b32b21-b002-008f-3959-31534e000000 + + Time:2021-04-14T18:11:01.5586123Z' + headers: + content-length: '327' + content-type: application/xml + date: Wed, 14 Apr 2021 18:11:01 GMT + server: Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 + x-ms-error-code: InvalidXmlDocument + x-ms-version: '2019-02-02' + status: + code: 400 + message: XML specified is not syntactically valid. 
+ url: https://seankaneprim.table.core.windows.net/pytableasynce03c1398?comp=acl - request: body: null headers: Accept: - application/json Date: - - Fri, 18 Dec 2020 17:29:58 GMT + - Wed, 14 Apr 2021 18:11:01 GMT User-Agent: - - azsdk-python-data-tables/12.0.0b4 Python/3.9.0rc1 (Windows-10-10.0.19041-SP0) + - azsdk-python-data-tables/12.0.0b7 Python/3.7.4 (Windows-10-10.0.19041-SP0) x-ms-date: - - Fri, 18 Dec 2020 17:29:58 GMT + - Wed, 14 Apr 2021 18:11:01 GMT x-ms-version: - '2019-02-02' method: DELETE @@ -57,7 +97,7 @@ interactions: headers: cache-control: no-cache content-length: '0' - date: Fri, 18 Dec 2020 17:29:58 GMT + date: Wed, 14 Apr 2021 18:11:01 GMT server: Windows-Azure-Table/1.0 Microsoft-HTTPAPI/2.0 x-content-type-options: nosniff x-ms-version: '2019-02-02' diff --git a/sdk/tables/azure-data-tables/tests/test_retry.py b/sdk/tables/azure-data-tables/tests/test_retry.py index 00deded55c32..1ca1388c49b4 100644 --- a/sdk/tables/azure-data-tables/tests/test_retry.py +++ b/sdk/tables/azure-data-tables/tests/test_retry.py @@ -15,15 +15,14 @@ AzureError, ClientAuthenticationError ) +from azure.core.pipeline.policies import RetryMode from azure.core.pipeline.transport import( RequestsTransport ) from azure.data.tables import ( TableServiceClient, - LocationMode, - LinearRetry, - ExponentialRetry, + LocationMode ) from _shared.testcase import ( @@ -97,61 +96,40 @@ def test_retry_on_server_error(self, tables_storage_account_name, tables_primary self.ts.delete_table(new_table_name) self._tear_down() - @TablesPreparer() def test_retry_on_timeout(self, tables_storage_account_name, tables_primary_storage_account_key): - retry = ExponentialRetry(initial_backoff=1, increment_base=2) - self._set_up(tables_storage_account_name, tables_primary_storage_account_key, retry_policy=retry, default_table=False) - - new_table_name = self.get_resource_name('uttable') - callback = ResponseCallback(status=201, new_status=408).override_status - - try: - # The initial create will return 201, but we overwrite it with 408 and retry. - # The retry will then get a 409 conflict. - with pytest.raises(ResourceExistsError): - self.ts.create_table(new_table_name, raw_response_hook=callback) - finally: - self.ts.delete_table(new_table_name) - self._tear_down() - - - @TablesPreparer() - def test_retry_callback_and_retry_context(self, tables_storage_account_name, tables_primary_storage_account_key): - retry = LinearRetry(backoff=1) - self._set_up(tables_storage_account_name, tables_primary_storage_account_key, retry_policy=retry, default_table=False) - - new_table_name = self.get_resource_name('uttable') - callback = ResponseCallback(status=201, new_status=408).override_status + self._set_up( + tables_storage_account_name, + tables_primary_storage_account_key, + default_table=False, + retry_mode=RetryMode.Exponential, + retry_backoff_factor=1 + ) - def assert_exception_is_present_on_retry_context(**kwargs): - self.assertIsNotNone(kwargs.get('response')) - self.assertEqual(kwargs['response'].status_code, 408) + callback = ResponseCallback(status=200, new_status=408).override_first_status try: - # The initial create will return 201, but we overwrite it with 408 and retry. - # The retry will then get a 409 conflict. - with pytest.raises(ResourceExistsError): - self.ts.create_table(new_table_name, raw_response_hook=callback, retry_hook=assert_exception_is_present_on_retry_context) + # The initial get will return 200, but we overwrite it with 408 and retry. + # The retry will then succeed. 
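+            # Note: azure-core's retry policy treats 408 as retryable by default, so with retry_mode=RetryMode.Exponential
+            # configured above, the overridden first response is retried rather than raised.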
+            self.ts.get_service_properties(raw_response_hook=callback)
         finally:
-            self.ts.delete_table(new_table_name)
             self._tear_down()

     @pytest.mark.live_test_only
     @TablesPreparer()
     def test_retry_on_socket_timeout(self, tables_storage_account_name, tables_primary_storage_account_key):
-        retry = LinearRetry(backoff=1)
         retry_transport = RetryRequestTransport(connection_timeout=11, read_timeout=0.000000000001)
         self._set_up(
             tables_storage_account_name,
             tables_primary_storage_account_key,
-            retry_policy=retry,
             transport=retry_transport,
-            default_table=False)
+            default_table=False,
+            retry_mode=RetryMode.Fixed,
+            retry_backoff_factor=1)

         new_table_name = self.get_resource_name('uttable')
         try:
             with pytest.raises(AzureError) as error:
-                self.ts.create_table(new_table_name)
+                self.ts.get_service_properties()

             # 3 retries + 1 original == 4
             assert retry_transport.count == 4
@@ -159,29 +137,24 @@ def test_retry_on_socket_timeout(self, tables_storage_account_name, tables_prima
             self.assertTrue('read timeout' in str(error.value),
                 'Expected socket timeout but got different exception.')
         finally:
-            # we must make the timeout normal again to let the delete operation succeed
-            self.ts.delete_table(new_table_name, connection_timeout=(11, 11))
-            self._tear_down(connection_timeout=(11, 11))
-
+            self._tear_down()

-    # Waiting on fix to client pipeline
-    # @TablesPreparer()
-    # def test_no_retry(self, tables_storage_account_name, tables_primary_storage_account_key):
-    #     self._set_up(tables_storage_account_name, tables_primary_storage_account_key, retry_total=0, default_table=False)
+    @TablesPreparer()
+    def test_no_retry(self, tables_storage_account_name, tables_primary_storage_account_key):
+        self._set_up(tables_storage_account_name, tables_primary_storage_account_key, retry_total=0, default_table=False)

-    #     new_table_name = self.get_resource_name('uttable')
+        new_table_name = self.get_resource_name('uttable')

-    #     # Force the create call to 'timeout' with a 408
-    #     callback = ResponseCallback(status=201, new_status=408).override_status
+        # Force the create call to error with a 500
+        callback = ResponseCallback(status=201, new_status=500).override_status

-    #     try:
-    #         with pytest.raises(HttpResponseError) as error:
-    #             self.ts.create_table(new_table_name, raw_response_hook=callback)
-    #         self.assertEqual(error.value.response.status_code, 408)
-    #         self.assertEqual(error.value.reason, 'Created')
+        try:
+            with pytest.raises(HttpResponseError) as error:
+                self.ts.create_table(new_table_name, raw_response_hook=callback)
+            assert error.value.response.status_code == 500
+            assert error.value.reason == 'Created'

-    #     finally:
-    #         self.ts.delete_table(new_table_name)
-    #         self._tear_down()
+        finally:
+            self.ts.delete_table(new_table_name)
+            self._tear_down()
 # ------------------------------------------------------------------------------
-
diff --git a/sdk/tables/azure-data-tables/tests/test_retry_async.py b/sdk/tables/azure-data-tables/tests/test_retry_async.py
index 9d71522723fc..bc396831de61 100644
--- a/sdk/tables/azure-data-tables/tests/test_retry_async.py
+++ b/sdk/tables/azure-data-tables/tests/test_retry_async.py
@@ -13,14 +13,14 @@
     HttpResponseError,
     ResourceExistsError,
     AzureError,
-    ClientAuthenticationError
+    ResourceNotFoundError
 )
+from azure.core.pipeline.policies import RetryMode
 from azure.core.pipeline.transport import(
     AioHttpTransport
 )

 from azure.data.tables.aio import TableServiceClient
-from azure.data.tables.aio._policies_async import LinearRetry, ExponentialRetry
 from azure.data.tables import LocationMode
 from _shared.asynctestcase import AsyncTableTestCase
@@ -95,61 +95,40 @@ async def test_retry_on_server_error_async(self, tables_storage_account_name, ta
             await self.ts.delete_table(new_table_name)
             await self._tear_down()

-
     @TablesPreparer()
     async def test_retry_on_timeout_async(self, tables_storage_account_name, tables_primary_storage_account_key):
-        retry = ExponentialRetry(initial_backoff=1, increment_base=2)
-        await self._set_up(tables_storage_account_name, tables_primary_storage_account_key, retry_policy=retry, default_table=False)
-
-        new_table_name = self.get_resource_name('uttable')
-        callback = ResponseCallback(status=201, new_status=408).override_status
-
-        try:
-            # The initial create will return 201, but we overwrite it with 408 and retry.
-            # The retry will then get a 409 conflict.
-            with pytest.raises(ResourceExistsError):
-                await self.ts.create_table(new_table_name, raw_response_hook=callback)
-        finally:
-            await self.ts.delete_table(new_table_name)
-            await self._tear_down()
-
-
-    @TablesPreparer()
-    async def test_retry_callback_and_retry_context_async(self, tables_storage_account_name, tables_primary_storage_account_key):
-        retry = LinearRetry(backoff=1)
-        await self._set_up(tables_storage_account_name, tables_primary_storage_account_key, retry_policy=retry, default_table=False)
+        await self._set_up(
+            tables_storage_account_name,
+            tables_primary_storage_account_key,
+            default_table=False,
+            retry_mode=RetryMode.Exponential,
+            retry_backoff_factor=1)

-        new_table_name = self.get_resource_name('uttable')
-        callback = ResponseCallback(status=201, new_status=408).override_status
+        callback = ResponseCallback(status=200, new_status=408).override_first_status

-        def assert_exception_is_present_on_retry_context(**kwargs):
-            self.assertIsNotNone(kwargs.get('response'))
-            self.assertEqual(kwargs['response'].status_code, 408)

         try:
-            # The initial create will return 201, but we overwrite it with 408 and retry.
-            # The retry will then get a 409 conflict.
-            with pytest.raises(ResourceExistsError):
-                await self.ts.create_table(new_table_name, raw_response_hook=callback, retry_hook=assert_exception_is_present_on_retry_context)
+            # The initial get will return 200, but we overwrite it with 408 and retry.
+            # The retry will then succeed.
+            await self.ts.get_service_properties(raw_response_hook=callback)
         finally:
-            await self.ts.delete_table(new_table_name)
             await self._tear_down()

     @pytest.mark.live_test_only
     @TablesPreparer()
     async def test_retry_on_socket_timeout_async(self, tables_storage_account_name, tables_primary_storage_account_key):
-        retry = LinearRetry(backoff=1)
         retry_transport = RetryAioHttpTransport(connection_timeout=11, read_timeout=0.000000000001)
         await self._set_up(
             tables_storage_account_name,
             tables_primary_storage_account_key,
-            retry_policy=retry,
+            retry_mode=RetryMode.Fixed,
+            retry_backoff_factor=1,
             transport=retry_transport,
             default_table=False)

         new_table_name = self.get_resource_name('uttable')
         try:
             with pytest.raises(AzureError) as error:
-                await self.ts.create_table(new_table_name)
+                await self.ts.get_service_properties()

             # 3 retries + 1 original == 4
             assert retry_transport.count == 4
@@ -157,31 +136,24 @@ async def test_retry_on_socket_timeout_async(self, tables_storage_account_name,
             self.assertTrue('Timeout on reading' in str(error.value),
                 'Expected socket timeout but got different exception.')
         finally:
-            # TODO: Why can I not just reset the connection timeout???
- await self._set_up(tables_storage_account_name, tables_primary_storage_account_key, default_table=False) - # we must make the timeout normal again to let the delete operation succeed - await self.ts.delete_table(new_table_name) await self._tear_down() + @TablesPreparer() + async def test_no_retry_async(self, tables_storage_account_name, tables_primary_storage_account_key): + await self._set_up(tables_storage_account_name, tables_primary_storage_account_key, retry_total=0, default_table=False) - # Waiting on fix to client pipeline - # @TablesPreparer() - # async def test_no_retry_async(self, tables_storage_account_name, tables_primary_storage_account_key): - # await self._set_up(tables_storage_account_name, tables_primary_storage_account_key, retry_total=0, default_table=False) - - # new_table_name = self.get_resource_name('uttable') + new_table_name = self.get_resource_name('uttable') - # # Force the create call to 'timeout' with a 408 - # callback = ResponseCallback(status=201, new_status=408).override_status + # Force the create call to error with a 500 + callback = ResponseCallback(status=201, new_status=500).override_status - # try: - # with with pytest.raises(HttpResponseError) as error: - # await self.ts.create_table(new_table_name, raw_response_hook=callback) - # self.assertEqual(error.value.response.status_code, 408) - # self.assertEqual(error.value.reason, 'Created') + try: + with pytest.raises(HttpResponseError) as error: + await self.ts.create_table(new_table_name, raw_response_hook=callback) + assert error.value.response.status_code == 500 + assert error.value.reason == 'Created' - # finally: - # await self.ts.delete_table(new_table_name) - # await self._tear_down() + finally: + await self.ts.delete_table(new_table_name) + await self._tear_down() # ------------------------------------------------------------------------------ - diff --git a/sdk/tables/azure-data-tables/tests/test_table.py b/sdk/tables/azure-data-tables/tests/test_table.py index da3697e27903..711bd2f50c82 100644 --- a/sdk/tables/azure-data-tables/tests/test_table.py +++ b/sdk/tables/azure-data-tables/tests/test_table.py @@ -136,7 +136,7 @@ def test_create_table_fail_on_exist(self, tables_storage_account_name, tables_pr ts.create_table(table_name) name_filter = "TableName eq '{}'".format(table_name) - existing = list(ts.query_tables(filter=name_filter)) + existing = list(ts.query_tables(name_filter)) # Assert assert created is not None @@ -156,7 +156,7 @@ def test_query_tables_per_page(self, tables_storage_account_name, tables_primary query_filter = "TableName eq 'mytable0' or TableName eq 'mytable1' or TableName eq 'mytable2'" table_count = 0 page_count = 0 - for table_page in ts.query_tables(filter=query_filter, results_per_page=2).by_page(): + for table_page in ts.query_tables(query_filter, results_per_page=2).by_page(): temp_count = 0 for table in table_page: @@ -231,7 +231,7 @@ def test_query_tables_with_filter(self, tables_storage_account_name, tables_prim # Act name_filter = "TableName eq '{}'".format(t.table_name) - tables = list(ts.query_tables(filter=name_filter)) + tables = list(ts.query_tables(name_filter)) for table_item in tables: assert isinstance(table_item, TableItem) @@ -433,7 +433,7 @@ def test_set_table_acl_too_many_ids(self, tables_storage_account_name, tables_pr # Assert with pytest.raises(ValueError): - table.set_table_access_policy(table_name=table.table_name, signed_identifiers=identifiers) + table.set_table_access_policy(signed_identifiers=identifiers) finally: 
ts.delete_table(table.table_name) diff --git a/sdk/tables/azure-data-tables/tests/test_table_async.py b/sdk/tables/azure-data-tables/tests/test_table_async.py index c04d70f85695..e704e3eee1f8 100644 --- a/sdk/tables/azure-data-tables/tests/test_table_async.py +++ b/sdk/tables/azure-data-tables/tests/test_table_async.py @@ -80,7 +80,7 @@ async def test_create_table_fail_on_exist(self, tables_storage_account_name, tab await ts.create_table(table_name=table_name) name_filter = "TableName eq '{}'".format(table_name) - existing = ts.query_tables(filter=name_filter) + existing = ts.query_tables(name_filter) # Assert assert isinstance(created, TableClient) @@ -100,7 +100,7 @@ async def test_query_tables_per_page(self, tables_storage_account_name, tables_p query_filter = "TableName eq 'myasynctable0' or TableName eq 'myasynctable1' or TableName eq 'myasynctable2'" table_count = 0 page_count = 0 - async for table_page in ts.query_tables(filter=query_filter, results_per_page=2).by_page(): + async for table_page in ts.query_tables(query_filter, results_per_page=2).by_page(): temp_count = 0 async for table in table_page: @@ -146,7 +146,7 @@ async def test_query_tables_with_filter(self, tables_storage_account_name, table # Act name_filter = "TableName eq '{}'".format(table.table_name) tables = [] - async for t in ts.query_tables(filter=name_filter): + async for t in ts.query_tables(name_filter): tables.append(t) # Assert @@ -342,7 +342,7 @@ async def test_set_table_acl_too_many_ids(self, tables_storage_account_name, tab # Assert with pytest.raises(ValueError): - await table.set_table_access_policy(table_name=table.table_name, signed_identifiers=identifiers) + await table.set_table_access_policy(signed_identifiers=identifiers) finally: await ts.delete_table(table.table_name) diff --git a/sdk/tables/azure-data-tables/tests/test_table_client.py b/sdk/tables/azure-data-tables/tests/test_table_client.py index 3c351722565c..67eaf3c6791a 100644 --- a/sdk/tables/azure-data-tables/tests/test_table_client.py +++ b/sdk/tables/azure-data-tables/tests/test_table_client.py @@ -213,7 +213,18 @@ def test_create_service_with_socket_timeout(self): # Assert self.validate_standard_account_endpoints(service, self.tables_storage_account_name, self.tables_primary_storage_account_key) assert service._client._client._pipeline._transport.connection_config.timeout == 22 - assert default_service._client._client._pipeline._transport.connection_config.timeout in [20, (20, 2000)] + assert default_service._client._client._pipeline._transport.connection_config.timeout == 300 + + # Assert Parent transport is shared with child client + service = TableServiceClient( + self.account_url(self.tables_storage_account_name, "table"), + credential=self.tables_primary_storage_account_key, + connection_timeout=22) + assert service._client._client._pipeline._transport.connection_config.timeout == 22 + table = service.get_table_client('tablename') + assert table._client._client._pipeline._transport._transport.connection_config.timeout == 22 + + # --Connection String Test Cases -------------------------------------------- def test_create_service_with_connection_string_key(self): @@ -447,10 +458,10 @@ def test_error_with_malformed_conn_str(self): with pytest.raises(ValueError) as e: service = service_type[0].from_connection_string(conn_str, table_name="test") - if conn_str in("", "foobar", "foo;bar;baz", ";"): + if conn_str in("", "foobar", "foo;bar;baz", ";", "foo=;bar=;", "=", "=;=="): assert str(e.value) == "Connection string is either blank or 
malformed."
-        elif conn_str in ("foobar=baz=foo" , "foo=;bar=;", "=", "=;=="):
-            assert str(e.value) == "Connection string missing required connection details."
+        elif conn_str == "foobar=baz=foo":
+            assert str(e.value) == "Connection string missing required connection details."

     def test_closing_pipeline_client(self):
         # Arrange
@@ -503,3 +514,18 @@ def test_create_service_with_token(self):
         assert service.credential == self.token_credential
         assert not hasattr(service.credential, 'account_key')
         assert hasattr(service.credential, 'get_token')
+
+    def test_create_client_with_api_version(self):
+        url = self.account_url(self.tables_storage_account_name, "table")
+        client = TableServiceClient(url, credential=self.tables_primary_storage_account_key)
+        assert client._client._config.version == "2019-02-02"
+        table = client.get_table_client('tablename')
+        assert table._client._config.version == "2019-02-02"
+
+        client = TableServiceClient(url, credential=self.tables_primary_storage_account_key, api_version="2019-07-07")
+        assert client._client._config.version == "2019-07-07"
+        table = client.get_table_client('tablename')
+        assert table._client._config.version == "2019-07-07"
+
+        with pytest.raises(ValueError):
+            TableServiceClient(url, credential=self.tables_primary_storage_account_key, api_version="foo")
diff --git a/sdk/tables/azure-data-tables/tests/test_table_client_async.py b/sdk/tables/azure-data-tables/tests/test_table_client_async.py
index c977fee9b085..92a9a32f599c 100644
--- a/sdk/tables/azure-data-tables/tests/test_table_client_async.py
+++ b/sdk/tables/azure-data-tables/tests/test_table_client_async.py
@@ -238,7 +238,16 @@ async def test_create_service_with_socket_timeout_async(self):
         # Assert
         self.validate_standard_account_endpoints(service, self.tables_storage_account_name, self.tables_primary_storage_account_key)
         assert service._client._client._pipeline._transport.connection_config.timeout == 22
-        assert default_service._client._client._pipeline._transport.connection_config.timeout in [20, (20, 2000)]
+        assert default_service._client._client._pipeline._transport.connection_config.timeout == 300
+
+        # Assert Parent transport is shared with child client
+        service = TableServiceClient(
+            self.account_url(self.tables_storage_account_name, "table"),
+            credential=self.tables_primary_storage_account_key,
+            connection_timeout=22)
+        assert service._client._client._pipeline._transport.connection_config.timeout == 22
+        table = service.get_table_client('tablename')
+        assert table._client._client._pipeline._transport._transport.connection_config.timeout == 22

     # --Connection String Test Cases --------------------------------------------
     @pytest.mark.asyncio
@@ -483,10 +492,10 @@ async def test_error_with_malformed_conn_str_async(self):
             with pytest.raises(ValueError) as e:
                 service = service_type[0].from_connection_string(conn_str, table_name="test")

-            if conn_str in("", "foobar", "foo;bar;baz", ";"):
+            if conn_str in("", "foobar", "foo;bar;baz", ";", "foo=;bar=;", "=", "=;=="):
                 assert str(e.value) == "Connection string is either blank or malformed."
-            elif conn_str in ("foobar=baz=foo" , "foo=;bar=;", "=", "=;=="):
-                assert str(e.value) == "Connection string missing required connection details."
+            elif conn_str == "foobar=baz=foo":
+                assert str(e.value) == "Connection string missing required connection details."
     @pytest.mark.asyncio
     async def test_closing_pipeline_client_async(self):
@@ -509,3 +518,19 @@ async def test_closing_pipeline_client_simple_async(self):
         service = client(
             self.account_url(self.tables_storage_account_name, "table"), credential=self.tables_primary_storage_account_key, table_name='table')
         await service.close()
+
+    @pytest.mark.asyncio
+    async def test_create_client_with_api_version(self):
+        url = self.account_url(self.tables_storage_account_name, "table")
+        client = TableServiceClient(url, credential=self.tables_primary_storage_account_key)
+        assert client._client._config.version == "2019-02-02"
+        table = client.get_table_client('tablename')
+        assert table._client._config.version == "2019-02-02"
+
+        client = TableServiceClient(url, credential=self.tables_primary_storage_account_key, api_version="2019-07-07")
+        assert client._client._config.version == "2019-07-07"
+        table = client.get_table_client('tablename')
+        assert table._client._config.version == "2019-07-07"
+
+        with pytest.raises(ValueError):
+            TableServiceClient(url, credential=self.tables_primary_storage_account_key, api_version="foo")
\ No newline at end of file
diff --git a/sdk/tables/azure-data-tables/tests/test_table_client_cosmos.py b/sdk/tables/azure-data-tables/tests/test_table_client_cosmos.py
index 16fee77422e7..a87d3cbbfacf 100644
--- a/sdk/tables/azure-data-tables/tests/test_table_client_cosmos.py
+++ b/sdk/tables/azure-data-tables/tests/test_table_client_cosmos.py
@@ -271,7 +271,7 @@ def test_create_service_with_socket_timeout(self):
         # Assert
         self.validate_standard_account_endpoints(service, self.tables_cosmos_account_name, self.tables_primary_cosmos_account_key)
         assert service._client._client._pipeline._transport.connection_config.timeout == 22
-        assert default_service._client._client._pipeline._transport.connection_config.timeout in [20, (20, 2000)]
+        assert default_service._client._client._pipeline._transport.connection_config.timeout == 300

     # --Connection String Test Cases --------------------------------------------

@@ -519,10 +519,10 @@ def test_error_with_malformed_conn_str(self):
             with pytest.raises(ValueError) as e:
                 service = service_type[0].from_connection_string(conn_str, table_name="test")

-            if conn_str in("", "foobar", "foo;bar;baz", ";"):
+            if conn_str in("", "foobar", "foo;bar;baz", ";", "foo=;bar=;", "=", "=;=="):
                 assert str(e.value) == "Connection string is either blank or malformed."
-            elif conn_str in ("foobar=baz=foo" , "foo=;bar=;", "=", "=;=="):
-                assert str(e.value) == "Connection string missing required connection details."
+            elif conn_str == "foobar=baz=foo":
+                assert str(e.value) == "Connection string missing required connection details."
     def test_closing_pipeline_client(self):
         # Arrange
diff --git a/sdk/tables/azure-data-tables/tests/test_table_client_cosmos_async.py b/sdk/tables/azure-data-tables/tests/test_table_client_cosmos_async.py
index a70c89346ac4..9f24c078a210 100644
--- a/sdk/tables/azure-data-tables/tests/test_table_client_cosmos_async.py
+++ b/sdk/tables/azure-data-tables/tests/test_table_client_cosmos_async.py
@@ -249,7 +249,7 @@ async def test_create_service_with_socket_timeout_async(self):
         # Assert
         self.validate_standard_account_endpoints(service, self.tables_cosmos_account_name, self.tables_primary_cosmos_account_key)
         assert service._client._client._pipeline._transport.connection_config.timeout == 22
-        assert default_service._client._client._pipeline._transport.connection_config.timeout in [20, (20, 2000)]
+        assert default_service._client._client._pipeline._transport.connection_config.timeout == 300

     # --Connection String Test Cases --------------------------------------------
     @pytest.mark.asyncio
@@ -495,10 +495,10 @@ async def test_error_with_malformed_conn_str_async(self):
             with pytest.raises(ValueError) as e:
                 service = service_type[0].from_connection_string(conn_str, table_name="test")

-            if conn_str in("", "foobar", "foo;bar;baz", ";"):
+            if conn_str in("", "foobar", "foo;bar;baz", ";", "foo=;bar=;", "=", "=;=="):
                 assert str(e.value) == "Connection string is either blank or malformed."
-            elif conn_str in ("foobar=baz=foo" , "foo=;bar=;", "=", "=;=="):
-                assert str(e.value) == "Connection string missing required connection details."
+            elif conn_str == "foobar=baz=foo":
+                assert str(e.value) == "Connection string missing required connection details."

     @pytest.mark.asyncio
     async def test_closing_pipeline_client_async(self):
diff --git a/sdk/tables/azure-data-tables/tests/test_table_cosmos.py b/sdk/tables/azure-data-tables/tests/test_table_cosmos.py
index c30c42ffeeca..56eadd94d1e7 100644
--- a/sdk/tables/azure-data-tables/tests/test_table_cosmos.py
+++ b/sdk/tables/azure-data-tables/tests/test_table_cosmos.py
@@ -158,7 +158,7 @@ def test_query_tables_per_page(self, tables_cosmos_account_name, tables_primary_
         query_filter = "TableName eq 'mytable0' or TableName eq 'mytable1' or TableName eq 'mytable2'"
         table_count = 0
         page_count = 0
-        for table_page in ts.query_tables(filter=query_filter, results_per_page=2).by_page():
+        for table_page in ts.query_tables(query_filter, results_per_page=2).by_page():

             temp_count = 0
             for table in table_page:
@@ -202,7 +202,7 @@ def test_query_tables_with_filter(self, tables_cosmos_account_name, tables_prima
         # Act
         name_filter = "TableName eq '{}'".format(table.table_name)
-        tables = list(ts.query_tables(filter=name_filter))
+        tables = list(ts.query_tables(name_filter))

         # Assert
         assert tables is not None
@@ -413,7 +413,7 @@ def test_set_table_acl_too_many_ids(self, tables_cosmos_account_name, tables_pri
             # Assert
             with pytest.raises(ValueError):
-                table.set_table_access_policy(table_name=table.table_name, signed_identifiers=identifiers)
+                table.set_table_access_policy(signed_identifiers=identifiers)
         finally:
             ts.delete_table(table.table_name)
diff --git a/sdk/tables/azure-data-tables/tests/test_table_cosmos_async.py b/sdk/tables/azure-data-tables/tests/test_table_cosmos_async.py
index 21009a2cce40..99fe4df383e8 100644
--- a/sdk/tables/azure-data-tables/tests/test_table_cosmos_async.py
+++ b/sdk/tables/azure-data-tables/tests/test_table_cosmos_async.py
@@ -102,7 +102,7 @@ async def test_query_tables_per_page(self, tables_cosmos_account_name, tables_pr
         query_filter = "TableName eq
diff --git a/sdk/tables/azure-data-tables/tests/test_table_cosmos_async.py b/sdk/tables/azure-data-tables/tests/test_table_cosmos_async.py
index 21009a2cce40..99fe4df383e8 100644
--- a/sdk/tables/azure-data-tables/tests/test_table_cosmos_async.py
+++ b/sdk/tables/azure-data-tables/tests/test_table_cosmos_async.py
@@ -102,7 +102,7 @@ async def test_query_tables_per_page(self, tables_cosmos_account_name, tables_pr
         query_filter = "TableName eq 'myasynctable0' or TableName eq 'myasynctable1' or TableName eq 'myasynctable2'"
         table_count = 0
         page_count = 0
-        async for table_page in ts.query_tables(filter=query_filter, results_per_page=2).by_page():
+        async for table_page in ts.query_tables(query_filter, results_per_page=2).by_page():
 
             temp_count = 0
             async for table in table_page:
@@ -148,7 +148,7 @@ async def test_query_tables_with_filter(self, tables_cosmos_account_name, tables
         # Act
         name_filter = "TableName eq '{}'".format(table.table_name)
         tables = []
-        async for t in ts.query_tables(filter=name_filter):
+        async for t in ts.query_tables(name_filter):
            tables.append(t)
 
         # Assert
@@ -361,7 +361,7 @@ async def test_set_table_acl_too_many_ids(self, tables_cosmos_account_name, tabl
 
             # Assert
             with pytest.raises(ValueError):
-                await table.set_table_access_policy(table_name=table.table_name, signed_identifiers=identifiers)
+                await table.set_table_access_policy(signed_identifiers=identifiers)
         finally:
             await ts.delete_table(table.table_name)
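`set_table_access_policy` likewise drops the redundant `table_name` parameter, since the `TableClient` already knows its table; passing more than five signed identifiers still raises `ValueError` locally, as the `test_set_table_acl_too_many_ids` cases above assert. A sketch with placeholder names, assuming the package's `AccessPolicy` and `TableSasPermissions` models:

```python
from datetime import datetime, timedelta, timezone
from azure.data.tables import TableClient, AccessPolicy, TableSasPermissions

# Placeholder connection string and table name.
table = TableClient.from_connection_string("<connection-string>", table_name="mytable")

# A single named policy; the service accepts at most five per table.
policy = AccessPolicy(
    permission=TableSasPermissions(read=True),
    start=datetime.now(timezone.utc),
    expiry=datetime.now(timezone.utc) + timedelta(hours=1),
)
table.set_table_access_policy(signed_identifiers={"read-only-1h": policy})
```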
diff --git a/sdk/tables/azure-data-tables/tests/test_table_entity.py b/sdk/tables/azure-data-tables/tests/test_table_entity.py
index f12c1bf19f1a..aa7e74ad69da 100644
--- a/sdk/tables/azure-data-tables/tests/test_table_entity.py
+++ b/sdk/tables/azure-data-tables/tests/test_table_entity.py
@@ -306,7 +306,7 @@ def test_url_encoding_at_symbol(self, tables_storage_account_name, tables_primar
             self.table.create_entity(entity)
 
             f = u"RowKey eq '{}'".format(entity["RowKey"])
-            entities = self.table.query_entities(filter=f)
+            entities = self.table.query_entities(f)
 
             count = 0
             for e in entities:
                 assert e.PartitionKey == entity[u"PartitionKey"]
@@ -318,7 +318,7 @@ def test_url_encoding_at_symbol(self, tables_storage_account_name, tables_primar
             assert count == 1
 
             count = 0
-            for e in self.table.query_entities(filter=f):
+            for e in self.table.query_entities(f):
                 count += 1
             assert count == 0
         finally:
@@ -350,7 +350,7 @@ def test_query_user_filter(self, tables_storage_account_name, tables_primary_sto
             # Act
             entities = self.table.query_entities(
-                filter="married eq @my_param",
+                "married eq @my_param",
                 parameters={'my_param': entity['married']}
             )
@@ -376,7 +376,7 @@ def test_query_user_filter_multiple_params(self, tables_storage_account_name, ta
                 'my_param': True,
                 'rk': entity['RowKey']
             }
-            entities = self.table.query_entities(filter="married eq @my_param and RowKey eq @rk", parameters=parameters)
+            entities = self.table.query_entities("married eq @my_param and RowKey eq @rk", parameters=parameters)
 
             length = 0
             assert entities is not None
@@ -399,7 +399,7 @@ def test_query_user_filter_integers(self, tables_storage_account_name, tables_pr
             parameters = {
                 'my_param': 40,
             }
-            entities = self.table.query_entities(filter="age lt @my_param", parameters=parameters)
+            entities = self.table.query_entities("age lt @my_param", parameters=parameters)
 
             length = 0
             assert entities is not None
@@ -422,7 +422,7 @@ def test_query_user_filter_floats(self, tables_storage_account_name, tables_prim
             parameters = {
                 'my_param': entity['ratio'] + 1,
             }
-            entities = self.table.query_entities(filter="ratio lt @my_param", parameters=parameters)
+            entities = self.table.query_entities("ratio lt @my_param", parameters=parameters)
 
             length = 0
             assert entities is not None
@@ -445,7 +445,7 @@ def test_query_user_filter_datetimes(self, tables_storage_account_name, tables_p
             parameters = {
                 'my_param': entity['birthday'],
             }
-            entities = self.table.query_entities(filter="birthday eq @my_param", parameters=parameters)
+            entities = self.table.query_entities("birthday eq @my_param", parameters=parameters)
 
             length = 0
             assert entities is not None
@@ -468,7 +468,7 @@ def test_query_user_filter_guids(self, tables_storage_account_name, tables_prima
             parameters = {
                 'my_param': entity['clsid']
             }
-            entities = self.table.query_entities(filter="clsid eq @my_param", parameters=parameters)
+            entities = self.table.query_entities("clsid eq @my_param", parameters=parameters)
 
             length = 0
             assert entities is not None
@@ -497,7 +497,7 @@ def test_query_invalid_filter(self, tables_storage_account_name, tables_primary_
             self.table.create_entity(base_entity)
             # Act
             with pytest.raises(HttpResponseError):
-                resp = self.table.query_entities(filter="aaa bbb ccc")
+                resp = self.table.query_entities("aaa bbb ccc")
                 for row in resp:
                     _ = row
@@ -1252,7 +1252,7 @@ def test_unicode_property_value(self, tables_storage_account_name, tables_primar
             self.table.create_entity(entity=entity1)
             self.table.create_entity(entity=entity2)
             entities = list(self.table.query_entities(
-                filter="PartitionKey eq '{}'".format(entity['PartitionKey'])))
+                "PartitionKey eq '{}'".format(entity['PartitionKey'])))
 
             # Assert
             assert len(entities) == 2
@@ -1276,7 +1276,7 @@ def test_unicode_property_name(self, tables_storage_account_name, tables_primary
             self.table.create_entity(entity=entity1)
             self.table.create_entity(entity=entity2)
             entities = list(self.table.query_entities(
-                filter="PartitionKey eq '{}'".format(entity['PartitionKey'])))
+                "PartitionKey eq '{}'".format(entity['PartitionKey'])))
 
             # Assert
             assert len(entities) == 2
@@ -1461,7 +1461,7 @@ def test_query_entities_each_page(self, tables_storage_account_name, tables_prim
             entity_count = 0
             page_count = 0
-            for entity_page in self.table.query_entities(filter=query_filter, results_per_page=2).by_page():
+            for entity_page in self.table.query_entities(query_filter, results_per_page=2).by_page():
 
                 temp_count = 0
                 for ent in entity_page:
@@ -1537,7 +1537,7 @@ def test_query_entities_with_filter(self, tables_storage_account_name, tables_pr
 
             # Act
             entities = list(self.table.query_entities(
-                filter="PartitionKey eq '{}'".format(entity.PartitionKey),
+                "PartitionKey eq '{}'".format(entity.PartitionKey),
                 results_per_page=1))
 
             # Assert
@@ -1709,7 +1709,7 @@ def test_sas_query(self, tables_storage_account_name, tables_primary_storage_acc
             )
             table = service.get_table_client(self.table_name)
             entities = list(table.query_entities(
-                filter="PartitionKey eq '{}'".format(entity['PartitionKey'])))
+                "PartitionKey eq '{}'".format(entity['PartitionKey'])))
 
             # Assert
             assert len(entities) == 1
@@ -1921,7 +1921,7 @@ def test_sas_upper_case_table_name(self, tables_storage_account_name, tables_pri
             )
             table = service.get_table_client(self.table_name)
             entities = list(table.query_entities(
-                filter="PartitionKey eq '{}'".format(entity.PartitionKey)))
+                "PartitionKey eq '{}'".format(entity.PartitionKey)))
 
             # Assert
             assert len(entities) == 1
@@ -1961,7 +1961,7 @@ def test_sas_signed_identifier(self, tables_storage_account_name, tables_primary
             )
             table = service.get_table_client(self.table_name)
             entities = list(table.query_entities(
-                filter="PartitionKey eq '{}'".format(entity.PartitionKey)))
+                "PartitionKey eq '{}'".format(entity.PartitionKey)))
 
             # Assert
             assert len(entities) == 1
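Entity queries follow the same convention: the filter moves to the first positional argument, while the `parameters` mapping still substitutes `@`-prefixed placeholders so user values never need to be formatted into the filter string. A sketch with placeholder connection details:

```python
from azure.data.tables import TableClient

# Placeholder connection string and table name.
table = TableClient.from_connection_string("<connection-string>", table_name="mytable")

# @my_param and @rk are bound from `parameters`, mirroring the tests above.
entities = table.query_entities(
    "married eq @my_param and RowKey eq @rk",
    parameters={'my_param': True, 'rk': 'row1'},
)
for entity in entities:
    print(entity['RowKey'])
```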
diff --git a/sdk/tables/azure-data-tables/tests/test_table_entity_async.py b/sdk/tables/azure-data-tables/tests/test_table_entity_async.py
index 5d06793cac33..a4bfbe15c478 100644
--- a/sdk/tables/azure-data-tables/tests/test_table_entity_async.py
+++ b/sdk/tables/azure-data-tables/tests/test_table_entity_async.py
@@ -299,7 +299,7 @@ async def test_url_encoding_at_symbol(self, tables_storage_account_name, tables_
             await self.table.create_entity(entity)
 
             f = u"RowKey eq '{}'".format(entity["RowKey"])
-            entities = self.table.query_entities(filter=f)
+            entities = self.table.query_entities(f)
 
             count = 0
             async for e in entities:
@@ -311,7 +311,7 @@ async def test_url_encoding_at_symbol(self, tables_storage_account_name, tables_
 
             assert count == 1
 
-            entities = self.table.query_entities(filter=f)
+            entities = self.table.query_entities(f)
             count = 0
             async for e in entities:
                 count += 1
@@ -1045,7 +1045,7 @@ async def test_unicode_property_value(self, tables_storage_account_name, tables_
             await self.table.create_entity(entity=entity2)
             entities = []
             async for e in self.table.query_entities(
-                    filter="PartitionKey eq '{}'".format(entity['PartitionKey'])):
+                    "PartitionKey eq '{}'".format(entity['PartitionKey'])):
                 entities.append(e)
 
             # Assert
@@ -1071,7 +1071,7 @@ async def test_unicode_property_name(self, tables_storage_account_name, tables_p
             await self.table.create_entity(entity=entity2)
             entities = []
             async for e in self.table.query_entities(
-                    filter="PartitionKey eq '{}'".format(entity['PartitionKey'])):
+                    "PartitionKey eq '{}'".format(entity['PartitionKey'])):
                 entities.append(e)
 
             # Assert
@@ -1264,7 +1264,7 @@ async def test_query_entities_each_page(self, tables_storage_account_name, table
             entity_count = 0
             page_count = 0
-            async for entity_page in self.table.query_entities(filter=query_filter, results_per_page=2).by_page():
+            async for entity_page in self.table.query_entities(query_filter, results_per_page=2).by_page():
 
                 temp_count = 0
                 async for ent in entity_page:
@@ -1383,7 +1383,7 @@ async def test_query_user_filter(self, tables_storage_account_name, tables_prima
             entity = await self._insert_two_opposite_entities()
 
             # Act
-            entities = self.table.query_entities(filter="married eq @my_param", parameters={'my_param': True})
+            entities = self.table.query_entities("married eq @my_param", parameters={'my_param': True})
 
             assert entities is not None
             length = 0
@@ -1407,7 +1407,7 @@ async def test_query_user_filter_multiple_params(self, tables_storage_account_na
                 'my_param': True,
                 'rk': entity['RowKey']
             }
-            entities = self.table.query_entities(filter="married eq @my_param and RowKey eq @rk", parameters=parameters)
+            entities = self.table.query_entities("married eq @my_param and RowKey eq @rk", parameters=parameters)
 
             length = 0
             assert entities is not None
@@ -1430,7 +1430,7 @@ async def test_query_user_filter_integers(self, tables_storage_account_name, tab
             parameters = {
                 'my_param': 40,
             }
-            entities = self.table.query_entities(filter="age lt @my_param", parameters=parameters)
+            entities = self.table.query_entities("age lt @my_param", parameters=parameters)
 
             length = 0
             assert entities is not None
@@ -1453,7 +1453,7 @@ async def test_query_user_filter_floats(self, tables_storage_account_name, table
             parameters = {
                 'my_param': entity['ratio'] + 1,
             }
-            entities = self.table.query_entities(filter="ratio lt @my_param", parameters=parameters)
+            entities = self.table.query_entities("ratio lt @my_param", parameters=parameters)
 
             length = 0
             assert entities is not None
@@ -1476,7 +1476,7 @@ async def test_query_user_filter_datetimes(self, tables_storage_account_name, ta
             parameters = {
                 'my_param': entity['birthday'],
             }
-            entities = self.table.query_entities(filter="birthday eq @my_param", parameters=parameters)
+            entities = self.table.query_entities("birthday eq @my_param", parameters=parameters)
 
             length = 0
             assert entities is not None
@@ -1499,7 +1499,7 @@ async def test_query_user_filter_guids(self, tables_storage_account_name, tables
             parameters = {
                 'my_param': entity['clsid']
             }
-            entities = self.table.query_entities(filter="clsid eq @my_param", parameters=parameters)
+            entities = self.table.query_entities("clsid eq @my_param", parameters=parameters)
 
             length = 0
             assert entities is not None
@@ -1578,7 +1578,7 @@ async def test_query_entities_with_filter(self, tables_storage_account_name, tab
             # Act
             entities = []
             async for t in self.table.query_entities(
-                    filter="PartitionKey eq '{}'".format(entity.PartitionKey)):
+                    "PartitionKey eq '{}'".format(entity.PartitionKey)):
                 entities.append(t)
 
             # Assert
@@ -1605,7 +1605,7 @@ async def test_query_invalid_filter(self, tables_storage_account_name, tables_pr
             await self.table.create_entity(base_entity)
             # Act
             with pytest.raises(HttpResponseError):
-                async for t in self.table.query_entities(filter="aaa bbb ccc"):
+                async for t in self.table.query_entities("aaa bbb ccc"):
                     _ = t
         finally:
             await self._tear_down()
@@ -1712,7 +1712,7 @@ async def test_sas_query(self, tables_storage_account_name, tables_primary_stora
             table = service.get_table_client(self.table_name)
             entities = []
             async for t in table.query_entities(
-                    filter="PartitionKey eq '{}'".format(entity['PartitionKey'])):
+                    "PartitionKey eq '{}'".format(entity['PartitionKey'])):
                 entities.append(t)
 
             # Assert
@@ -1914,7 +1914,7 @@ async def test_sas_upper_case_table_name(self, tables_storage_account_name, tabl
             table = service.get_table_client(self.table_name)
             entities = []
             async for t in table.query_entities(
-                    filter="PartitionKey eq '{}'".format(entity['PartitionKey'])):
+                    "PartitionKey eq '{}'".format(entity['PartitionKey'])):
                 entities.append(t)
 
             # Assert
@@ -1956,7 +1956,7 @@ async def test_sas_signed_identifier(self, tables_storage_account_name, tables_p
             table = service.get_table_client(table_name=self.table_name)
             entities = []
             async for t in table.query_entities(
-                    filter="PartitionKey eq '{}'".format(entity.PartitionKey)):
+                    "PartitionKey eq '{}'".format(entity.PartitionKey)):
                 entities.append(t)
 
             # Assert
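The aio client takes the same positional filter and returns an async iterable, consumed with `async for` exactly as in these tests. A sketch, again with a placeholder connection string:

```python
import asyncio
from azure.data.tables.aio import TableClient

async def main():
    # Placeholder connection string and table name.
    table = TableClient.from_connection_string("<connection-string>", table_name="mytable")
    async with table:
        # Positional filter with @-parameter binding, as in the sync client.
        async for entity in table.query_entities("age lt @my_param", parameters={'my_param': 40}):
            print(entity['RowKey'])

asyncio.run(main())
```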
diff --git a/sdk/tables/azure-data-tables/tests/test_table_entity_cosmos.py b/sdk/tables/azure-data-tables/tests/test_table_entity_cosmos.py
index 83368f5f94c9..96feec985379 100644
--- a/sdk/tables/azure-data-tables/tests/test_table_entity_cosmos.py
+++ b/sdk/tables/azure-data-tables/tests/test_table_entity_cosmos.py
@@ -310,7 +310,7 @@ def test_url_encoding_at_symbol(self, tables_cosmos_account_name, tables_primary
             self.table.create_entity(entity)
 
             f = u"RowKey eq '{}'".format(entity["RowKey"])
-            entities = self.table.query_entities(filter=f)
+            entities = self.table.query_entities(f)
 
             count = 0
             for e in entities:
                 assert e.PartitionKey == entity[u"PartitionKey"]
@@ -322,7 +322,7 @@ def test_url_encoding_at_symbol(self, tables_cosmos_account_name, tables_primary
             assert count == 1
 
             count = 0
-            for e in self.table.query_entities(filter=f):
+            for e in self.table.query_entities(f):
                 count += 1
             assert count == 0
         finally:
@@ -354,7 +354,7 @@ def test_query_user_filter(self, tables_cosmos_account_name, tables_primary_cosm
             entity = self._insert_random_entity()
 
             # Act
-            entities = self.table.query_entities(filter="married eq @my_param", parameters={'my_param': True})
+            entities = self.table.query_entities("married eq @my_param", parameters={'my_param': True})
 
             assert entities is not None
             length = 0
@@ -379,7 +379,7 @@ def test_query_user_filter_multiple_params(self, tables_cosmos_account_name, tab
                 'my_param': True,
                 'rk': entity['RowKey']
             }
-            entities = self.table.query_entities(filter="married eq @my_param and RowKey eq @rk", parameters=parameters)
+            entities = self.table.query_entities("married eq @my_param and RowKey eq @rk", parameters=parameters)
 
             assert entities is not None
             length = 0
@@ -403,7 +403,7 @@ def test_query_user_filter_integers(self, tables_cosmos_account_name, tables_pri
             parameters = {
                 'my_param': 40,
             }
-            entities = self.table.query_entities(filter="age lt @my_param", parameters=parameters)
+            entities = self.table.query_entities("age lt @my_param", parameters=parameters)
 
             length = 0
             assert entities is not None
@@ -427,7 +427,7 @@ def test_query_user_filter_floats(self, tables_cosmos_account_name, tables_prima
             parameters = {
                 'my_param': entity['ratio'] + 1.0,
             }
-            entities = self.table.query_entities(filter="ratio lt @my_param", parameters=parameters)
+            entities = self.table.query_entities("ratio lt @my_param", parameters=parameters)
 
             length = 0
             assert entities is not None
@@ -451,7 +451,7 @@ def test_query_user_filter_datetimes(self, tables_cosmos_account_name, tables_pr
             parameters = {
                 'my_param': entity['birthday'],
             }
-            entities = self.table.query_entities(filter="birthday eq @my_param", parameters=parameters)
+            entities = self.table.query_entities("birthday eq @my_param", parameters=parameters)
 
             length = 0
             assert entities is not None
@@ -475,7 +475,7 @@ def test_query_user_filter_guids(self, tables_cosmos_account_name, tables_primar
             parameters = {
                 'my_param': entity['clsid']
             }
-            entities = self.table.query_entities(filter="clsid eq @my_param", parameters=parameters)
+            entities = self.table.query_entities("clsid eq @my_param", parameters=parameters)
 
             length = 0
             assert entities is not None
@@ -505,7 +505,7 @@ def test_query_invalid_filter(self, tables_cosmos_account_name, tables_primary_c
             self.table.create_entity(base_entity)
             # Act
             with pytest.raises(HttpResponseError):
-                resp = self.table.query_entities(filter="aaa bbb ccc")
+                resp = self.table.query_entities("aaa bbb ccc")
                 for row in resp:
                     _ = row
@@ -1235,7 +1235,7 @@ def test_unicode_property_value(self, tables_cosmos_account_name, tables_primary
             self.table.create_entity(entity=entity1)
             self.table.create_entity(entity=entity2)
             entities = list(self.table.query_entities(
-                filter="PartitionKey eq '{}'".format(entity['PartitionKey'])))
+                "PartitionKey eq '{}'".format(entity['PartitionKey'])))
 
             # Assert
             assert len(entities) == 2
@@ -1260,7 +1260,7 @@ def test_unicode_property_name(self, tables_cosmos_account_name, tables_primary_
             self.table.create_entity(entity=entity1)
             self.table.create_entity(entity=entity2)
             entities = list(self.table.query_entities(
-                filter="PartitionKey eq '{}'".format(entity['PartitionKey'])))
+                "PartitionKey eq '{}'".format(entity['PartitionKey'])))
 
             # Assert
             assert len(entities) == 2
@@ -1454,7 +1454,7 @@ def test_query_entities_each_page(self, tables_cosmos_account_name, tables_prima
             entity_count = 0
             page_count = 0
-            for entity_page in self.table.query_entities(filter=query_filter, results_per_page=2).by_page():
+            for entity_page in self.table.query_entities(query_filter, results_per_page=2).by_page():
 
                 temp_count = 0
                 for ent in entity_page:
@@ -1534,7 +1534,7 @@ def test_query_entities_with_filter(self, tables_cosmos_account_name, tables_pri
 
             # Act
             entities = list(self.table.query_entities(
-                filter="PartitionKey eq '{}'".format(entity.PartitionKey)))
+                "PartitionKey eq '{}'".format(entity.PartitionKey)))
 
             # Assert
             assert len(entities) == 1
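As the `test_query_invalid_filter` cases above show, filter syntax is validated by the service and the request is issued lazily, so a malformed filter only fails once the result iterator is consumed; application code should therefore wrap the iteration, not just the call. A sketch with placeholder connection details:

```python
from azure.core.exceptions import HttpResponseError
from azure.data.tables import TableClient

# Placeholder connection string and table name.
table = TableClient.from_connection_string("<connection-string>", table_name="mytable")

try:
    # No request is sent until iteration begins, so the error surfaces here.
    for entity in table.query_entities("aaa bbb ccc"):
        print(entity)
except HttpResponseError as error:
    print("Service rejected the filter:", error.message)
```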
diff --git a/sdk/tables/azure-data-tables/tests/test_table_entity_cosmos_async.py b/sdk/tables/azure-data-tables/tests/test_table_entity_cosmos_async.py
index 6ea7f3bd126a..196a13dfe0f0 100644
--- a/sdk/tables/azure-data-tables/tests/test_table_entity_cosmos_async.py
+++ b/sdk/tables/azure-data-tables/tests/test_table_entity_cosmos_async.py
@@ -300,7 +300,7 @@ async def test_url_encoding_at_symbol(self, tables_cosmos_account_name, tables_p
             await self.table.create_entity(entity)
 
             f = u"RowKey eq '{}'".format(entity["RowKey"])
-            entities = self.table.query_entities(filter=f)
+            entities = self.table.query_entities(f)
 
             count = 0
             async for e in entities:
@@ -312,7 +312,7 @@ async def test_url_encoding_at_symbol(self, tables_cosmos_account_name, tables_p
 
             assert count == 1
 
-            entities = self.table.query_entities(filter=f)
+            entities = self.table.query_entities(f)
             count = 0
             async for e in entities:
                 count += 1
@@ -992,7 +992,7 @@ async def test_unicode_property_value(self, tables_cosmos_account_name, tables_p
             await self.table.create_entity(entity=entity2)
             entities = []
             async for e in self.table.query_entities(
-                    filter="PartitionKey eq '{}'".format(entity['PartitionKey'])):
+                    "PartitionKey eq '{}'".format(entity['PartitionKey'])):
                 entities.append(e)
 
             # Assert
@@ -1018,7 +1018,7 @@ async def test_unicode_property_name(self, tables_cosmos_account_name, tables_pr
             await self.table.create_entity(entity=entity2)
             entities = []
             async for e in self.table.query_entities(
-                    filter="PartitionKey eq '{}'".format(entity['PartitionKey'])):
+                    "PartitionKey eq '{}'".format(entity['PartitionKey'])):
                 entities.append(e)
 
             # Assert
@@ -1204,7 +1204,7 @@ async def test_query_entities_each_page(self, tables_cosmos_account_name, tables
             entity_count = 0
             page_count = 0
-            async for entity_page in self.table.query_entities(filter=query_filter, results_per_page=2).by_page():
+            async for entity_page in self.table.query_entities(query_filter, results_per_page=2).by_page():
 
                 temp_count = 0
                 async for ent in entity_page:
@@ -1228,7 +1228,7 @@ async def test_query_user_filter(self, tables_cosmos_account_name, tables_primar
             entity = await self._insert_two_opposite_entities()
 
             # Act
-            entities = self.table.query_entities(filter="married eq @my_param", parameters={'my_param': True})
+            entities = self.table.query_entities("married eq @my_param", parameters={'my_param': True})
 
             length = 0
             assert entities is not None
@@ -1253,7 +1253,7 @@ async def test_query_user_filter_multiple_params(self, tables_cosmos_account_nam
                 'my_param': True,
                 'rk': entity['RowKey']
             }
-            entities = self.table.query_entities(filter="married eq @my_param and RowKey eq @rk", parameters=parameters)
+            entities = self.table.query_entities("married eq @my_param and RowKey eq @rk", parameters=parameters)
 
             length = 0
             assert entities is not None
@@ -1276,7 +1276,7 @@ async def test_query_user_filter_integers(self, tables_cosmos_account_name, tabl
             parameters = {
                 'my_param': 40,
             }
-            entities = self.table.query_entities(filter="age lt @my_param", parameters=parameters)
+            entities = self.table.query_entities("age lt @my_param", parameters=parameters)
 
             length = 0
             assert entities is not None
@@ -1299,7 +1299,7 @@ async def test_query_user_filter_floats(self, tables_cosmos_account_name, tables
             parameters = {
                 'my_param': entity['ratio'] + 1.0,
             }
-            entities = self.table.query_entities(filter="ratio lt @my_param", parameters=parameters)
+            entities = self.table.query_entities("ratio lt @my_param", parameters=parameters)
 
             length = 0
             assert entities is not None
@@ -1322,7 +1322,7 @@ async def test_query_user_filter_datetimes(self, tables_cosmos_account_name, tab
             parameters = {
                 'my_param': entity['birthday'],
             }
-            entities = self.table.query_entities(filter="birthday eq @my_param", parameters=parameters)
+            entities = self.table.query_entities("birthday eq @my_param", parameters=parameters)
 
             length = 0
             assert entities is not None
@@ -1345,7 +1345,7 @@ async def test_query_user_filter_guids(self, tables_cosmos_account_name, tables_
             parameters = {
                 'my_param': entity['clsid']
             }
-            entities = self.table.query_entities(filter="clsid eq @my_param", parameters=parameters)
+            entities = self.table.query_entities("clsid eq @my_param", parameters=parameters)
 
             length = 0
             assert entities is not None
@@ -1424,7 +1424,7 @@ async def test_query_entities_with_filter(self, tables_cosmos_account_name, tabl
             # Act
             entities = []
             async for t in self.table.query_entities(
-                    filter="PartitionKey eq '{}'".format(entity.PartitionKey)):
+                    "PartitionKey eq '{}'".format(entity.PartitionKey)):
                 entities.append(t)
 
             # Assert
@@ -1544,7 +1544,7 @@ async def test_query_invalid_filter(self, tables_cosmos_account_name, tables_pri
             await self.table.create_entity(base_entity)
             # Act
             with pytest.raises(HttpResponseError):
-                async for t in self.table.query_entities(filter="aaa bbb ccc"):
+                async for t in self.table.query_entities("aaa bbb ccc"):
                     _ = t
         finally:
             await self._tear_down()
diff --git a/shared_requirements.txt b/shared_requirements.txt
index 186b7b740268..3cc4d69c1ac9 100644
--- a/shared_requirements.txt
+++ b/shared_requirements.txt
@@ -127,7 +127,7 @@ avro<2.0.0,>=1.10.0
 #override azure-core-tracing-opencensus azure-core<2.0.0,>=1.0.0
 #override azure-core-tracing-opentelemetry azure-core<2.0.0,>=1.13.0
 #override azure-cosmos azure-core<2.0.0,>=1.0.0
-#override azure-data-tables azure-core<2.0.0,>=1.10.0
+#override azure-data-tables azure-core<2.0.0,>=1.13.0
 #override azure-eventhub azure-core<2.0.0,>=1.13.0
 #override azure-identity azure-core<2.0.0,>=1.0.0
 #override azure-keyvault-administration msrest>=0.6.21