From 61ba8d1365e873acd8e464a0ffda85415cff1e68 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Fri, 13 Aug 2021 13:14:17 -0400 Subject: [PATCH 01/56] initial commit --- sdk/cosmos/azure-cosmos/azure/cosmos/_base.py | 2 +- .../azure-cosmos/azure/cosmos/aio/__init__.py | 0 .../azure/cosmos/aio/_asynchronous_request.py | 186 ++ .../aio/_cosmos_client_connection_async.py | 2667 +++++++++++++++++ .../aio/_global_endpoint_manager_async.py | 174 ++ .../azure/cosmos/aio/container_async.py | 802 +++++ .../azure/cosmos/aio/cosmos_client_async.py | 456 +++ .../azure/cosmos/aio/database_async.py | 768 +++++ .../azure-cosmos/azure/cosmos/container.py | 58 + sdk/cosmos/azure-cosmos/samples/heroes.py | 97 + .../azure-cosmos/samples/simon_testfile.py | 169 ++ 11 files changed, 5378 insertions(+), 1 deletion(-) create mode 100644 sdk/cosmos/azure-cosmos/azure/cosmos/aio/__init__.py create mode 100644 sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py create mode 100644 sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py create mode 100644 sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py create mode 100644 sdk/cosmos/azure-cosmos/azure/cosmos/aio/container_async.py create mode 100644 sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client_async.py create mode 100644 sdk/cosmos/azure-cosmos/azure/cosmos/aio/database_async.py create mode 100644 sdk/cosmos/azure-cosmos/samples/heroes.py create mode 100644 sdk/cosmos/azure-cosmos/samples/simon_testfile.py diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py index 0c06891a549c..46b7d46918d5 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py @@ -175,7 +175,7 @@ def GetHeaders( # pylint: disable=too-many-statements,too-many-branches if options.get("consistencyLevel"): consistency_level = options["consistencyLevel"] headers[http_constants.HttpHeaders.ConsistencyLevel] = consistency_level - elif default_client_consistency_level is not None: + elif default_client_consistency_level is not None: # Why not just check for `default_client_consistency_level` consistency_level = default_client_consistency_level headers[http_constants.HttpHeaders.ConsistencyLevel] = consistency_level diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/__init__.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py new file mode 100644 index 000000000000..986cb130b9ae --- /dev/null +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py @@ -0,0 +1,186 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. 
+ +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Asynchronous request in the Azure Cosmos database service. +""" + +import json +import time + +from six.moves.urllib.parse import urlparse +import six +from azure.core.exceptions import DecodeError # type: ignore + +from .. import exceptions +from .. import http_constants +from .. import _retry_utility +from .._synchronized_request import _request_body_from_data + + +async def _Request(global_endpoint_manager, request_params, connection_policy, pipeline_client, request, **kwargs): + """Makes one http request using the requests module. + + :param _GlobalEndpointManager global_endpoint_manager: + :param dict request_params: + contains the resourceType, operationType, endpointOverride, + useWriteEndpoint, useAlternateWriteEndpoint information + :param documents.ConnectionPolicy connection_policy: + :param azure.core.PipelineClient pipeline_client: + Pipeline client to process the request + :param azure.core.HttpRequest request: + The request object to send through the pipeline + :return: tuple of (result, headers) + :rtype: tuple of (dict, dict) + + """ + # pylint: disable=protected-access + + connection_timeout = connection_policy.RequestTimeout + connection_timeout = kwargs.pop("connection_timeout", connection_timeout / 1000.0) + + # Every request tries to perform a refresh + client_timeout = kwargs.get('timeout') + start_time = time.time() + global_endpoint_manager.refresh_endpoint_list(None, **kwargs) + if client_timeout is not None: + kwargs['timeout'] = client_timeout - (time.time() - start_time) + if kwargs['timeout'] <= 0: + raise exceptions.CosmosClientTimeoutError() + + if request_params.endpoint_override: + base_url = request_params.endpoint_override + else: + base_url = global_endpoint_manager.resolve_service_endpoint(request_params) + if base_url != pipeline_client._base_url: + request.url = request.url.replace(pipeline_client._base_url, base_url) + + parse_result = urlparse(request.url) + + # The requests library now expects header values to be strings only starting 2.11, + # and will raise an error on validation if they are not, so casting all header values to strings. + request.headers.update({header: str(value) for header, value in request.headers.items()}) + + # We are disabling the SSL verification for local emulator(localhost/127.0.0.1) or if the user + # has explicitly specified to disable SSL verification. 
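+    # Note (editorial comment, not part of the original commit): despite its name,
+    # the is_ssl_enabled flag computed below is only used as the default value for
+    # connection_verify, so it governs certificate verification rather than whether
+    # TLS is used at all; it stays True for any non-local hostname unless
+    # DisableSSLVerification is explicitly set on the connection policy.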
+ is_ssl_enabled = ( + parse_result.hostname != "localhost" + and parse_result.hostname != "127.0.0.1" + and not connection_policy.DisableSSLVerification + ) + + if connection_policy.SSLConfiguration or "connection_cert" in kwargs: + ca_certs = connection_policy.SSLConfiguration.SSLCaCerts + cert_files = (connection_policy.SSLConfiguration.SSLCertFile, connection_policy.SSLConfiguration.SSLKeyFile) + response = await _PipelineRunFunction( + pipeline_client, + request, + connection_timeout=connection_timeout, + connection_verify=kwargs.pop("connection_verify", ca_certs), + connection_cert=kwargs.pop("connection_cert", cert_files), + **kwargs + ) + else: + response = await _PipelineRunFunction( + pipeline_client, + request, + connection_timeout=connection_timeout, + # If SSL is disabled, verify = false + connection_verify=kwargs.pop("connection_verify", is_ssl_enabled), + **kwargs + ) + + response = response.http_response + headers = dict(response.headers) + + data = response.body() + if data and not six.PY2: + # python 3 compatible: convert data from byte to unicode string + data = data.decode("utf-8") + + if response.status_code == 404: + raise exceptions.CosmosResourceNotFoundError(message=data, response=response) + if response.status_code == 409: + raise exceptions.CosmosResourceExistsError(message=data, response=response) + if response.status_code == 412: + raise exceptions.CosmosAccessConditionFailedError(message=data, response=response) + if response.status_code >= 400: + raise exceptions.CosmosHttpResponseError(message=data, response=response) + + result = None + if data: + try: + result = json.loads(data) + except Exception as e: + raise DecodeError( + message="Failed to decode JSON data: {}".format(e), + response=response, + error=e) + + return result, headers + + +async def _PipelineRunFunction(pipeline_client, request, **kwargs): + # pylint: disable=protected-access + + return await pipeline_client._pipeline.run(request, **kwargs) + +async def AsynchronousRequest( + client, + request_params, + global_endpoint_manager, + connection_policy, + pipeline_client, + request, + request_data, + **kwargs +): + """Performs one asynchronous http request according to the parameters. + + :param object client: Document client instance + :param dict request_params: + :param _GlobalEndpointManager global_endpoint_manager: + :param documents.ConnectionPolicy connection_policy: + :param azure.core.PipelineClient pipeline_client: PipelineClient to process the request. 
+ :param str method: + :param str path: + :param (str, unicode, file-like stream object, dict, list or None) request_data: + :param dict query_params: + :param dict headers: + :return: tuple of (result, headers) + :rtype: tuple of (dict dict) + """ + request.data = _request_body_from_data(request_data) + if request.data and isinstance(request.data, six.string_types): + request.headers[http_constants.HttpHeaders.ContentLength] = len(request.data) + elif request.data is None: + request.headers[http_constants.HttpHeaders.ContentLength] = 0 + + # Pass _Request function with it's parameters to retry_utility's Execute method that wraps the call with retries + return await _retry_utility.Execute( + client, + global_endpoint_manager, + _Request, + request_params, + connection_policy, + pipeline_client, + request, + **kwargs + ) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py new file mode 100644 index 000000000000..c23ac5bde52e --- /dev/null +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py @@ -0,0 +1,2667 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +# disable (too-many-lines) check +# pylint: disable=C0302 + +"""Document client class for the Azure Cosmos database service. +""" +# https://github.com/PyCQA/pylint/issues/3112 +# Currently pylint is locked to 2.3.3 and this is fixed in 2.4.4 +from typing import Dict, Any, Optional # pylint: disable=unused-import +import six +import asyncio +from urllib3.util.retry import Retry +from azure.core.async_paging import AsyncItemPaged +from azure.core import AsyncPipelineClient +from azure.core import PipelineClient +from azure.core.exceptions import raise_with_traceback # type: ignore +from azure.core.pipeline.policies import ( + AsyncHTTPPolicy, + ContentDecodePolicy, + HeadersPolicy, + UserAgentPolicy, + NetworkTraceLoggingPolicy, + CustomHookPolicy, + DistributedTracingPolicy, + HttpLoggingPolicy, + ProxyPolicy) + +from .. import _base as base +from .. import documents +from ..documents import ConnectionPolicy +from .. import _constants as constants +from .. import http_constants +from .. import _query_iterable as query_iterable +from .. import _runtime_constants as runtime_constants +from .. import _request_object +from .. import _synchronized_request as synchronized_request +from . 
import _asynchronous_request as asynchronous_request +from .. import _global_endpoint_manager as global_endpoint_manager +from . import _global_endpoint_manager_async as global_endpoint_manager_async +from .._routing import routing_map_provider +from .._retry_utility import ConnectionRetryPolicy +from .. import _session +from .. import _utils +from ..partition_key import _Undefined, _Empty +from .._cosmos_client_connection import CosmosClientConnection as BaseCosmosConnection + +# pylint: disable=protected-access + + +class CosmosClientConnection(BaseCosmosConnection, object): # pylint: disable=too-many-public-methods,too-many-instance-attributes + """Represents a document client. + + Provides a client-side logical representation of the Azure Cosmos + service. This client is used to configure and execute requests against the + service. + + The service client encapsulates the endpoint and credentials used to access + the Azure Cosmos service. + """ + + class _QueryCompatibilityMode: + Default = 0 + Query = 1 + SqlQuery = 2 + + # default number precisions + _DefaultNumberHashPrecision = 3 + _DefaultNumberRangePrecision = -1 + + # default string precision + _DefaultStringHashPrecision = 3 + _DefaultStringRangePrecision = -1 + + def __init__( + self, + url_connection, # type: str + auth, # type: Dict[str, Any] + connection_policy=None, # type: Optional[ConnectionPolicy] + consistency_level=documents.ConsistencyLevel.Session, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """ + :param str url_connection: + The URL for connecting to the DB server. + :param dict auth: + Contains 'masterKey' or 'resourceTokens', where + auth['masterKey'] is the default authorization key to use to + create the client, and auth['resourceTokens'] is the alternative + authorization key. + :param documents.ConnectionPolicy connection_policy: + The connection policy for the client. + :param documents.ConsistencyLevel consistency_level: + The default consistency policy for client operations. + + """ + # super(CosmosClientConnection, self).__init__( + # url_connection=url_connection, + # auth=auth, + # connection_policy=connection_policy, + # consistency_level=consistency_level, + # **kwargs) + self.url_connection = url_connection + + self.master_key = None + self.resource_tokens = None + if auth is not None: + self.master_key = auth.get("masterKey") + self.resource_tokens = auth.get("resourceTokens") + + if auth.get("permissionFeed"): + self.resource_tokens = {} + for permission_feed in auth["permissionFeed"]: + resource_parts = permission_feed["resource"].split("/") + id_ = resource_parts[-1] + self.resource_tokens[id_] = permission_feed["_token"] + + self.connection_policy = connection_policy or ConnectionPolicy() + + self.partition_resolvers = {} # type: Dict[str, Any] + + self.partition_key_definition_cache = {} # type: Dict[str, Any] + + self.default_headers = { + http_constants.HttpHeaders.CacheControl: "no-cache", + http_constants.HttpHeaders.Version: http_constants.Versions.CurrentVersion, + # For single partition query with aggregate functions we would try to accumulate the results on the SDK. + # We need to set continuation as not expected. + http_constants.HttpHeaders.IsContinuationExpected: False, + } + + if consistency_level is not None: + self.default_headers[http_constants.HttpHeaders.ConsistencyLevel] = consistency_level + + # Keeps the latest response headers from server. 
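+        # Note (editorial comment, not part of the original commit): the request
+        # helpers and query fetch functions defined below overwrite this attribute,
+        # so it reflects the headers of the most recent response issued through
+        # this client connection.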
+ self.last_response_headers = None + + if consistency_level == documents.ConsistencyLevel.Session: + # create a session - this is maintained only if the default consistency level + # on the client is set to session, or if the user explicitly sets it as a property + # via setter + self.session = _session.Session(self.url_connection) + else: + self.session = None # type: ignore + + self._useMultipleWriteLocations = False + self._global_endpoint_manager = global_endpoint_manager._GlobalEndpointManager(self) + + retry_policy = None + if isinstance(self.connection_policy.ConnectionRetryConfiguration, AsyncHTTPPolicy): + retry_policy = self.connection_policy.ConnectionRetryConfiguration + elif isinstance(self.connection_policy.ConnectionRetryConfiguration, int): + retry_policy = ConnectionRetryPolicy(total=self.connection_policy.ConnectionRetryConfiguration) + elif isinstance(self.connection_policy.ConnectionRetryConfiguration, Retry): + # Convert a urllib3 retry policy to a Pipeline policy + retry_policy = ConnectionRetryPolicy( + retry_total=self.connection_policy.ConnectionRetryConfiguration.total, + retry_connect=self.connection_policy.ConnectionRetryConfiguration.connect, + retry_read=self.connection_policy.ConnectionRetryConfiguration.read, + retry_status=self.connection_policy.ConnectionRetryConfiguration.status, + retry_backoff_max=self.connection_policy.ConnectionRetryConfiguration.BACKOFF_MAX, + retry_on_status_codes=list(self.connection_policy.ConnectionRetryConfiguration.status_forcelist), + retry_backoff_factor=self.connection_policy.ConnectionRetryConfiguration.backoff_factor + ) + else: + TypeError("Unsupported retry policy. Must be an azure.cosmos.ConnectionRetryPolicy, int, or urllib3.Retry") + + proxies = kwargs.pop('proxies', {}) + if self.connection_policy.ProxyConfiguration and self.connection_policy.ProxyConfiguration.Host: + host = self.connection_policy.ProxyConfiguration.Host + url = six.moves.urllib.parse.urlparse(host) + proxy = host if url.port else host + ":" + str(self.connection_policy.ProxyConfiguration.Port) + proxies.update({url.scheme : proxy}) + + policies = [ + HeadersPolicy(**kwargs), + ProxyPolicy(proxies=proxies), + UserAgentPolicy(base_user_agent=_utils.get_user_agent(), **kwargs), + ContentDecodePolicy(), + retry_policy, + CustomHookPolicy(**kwargs), + NetworkTraceLoggingPolicy(**kwargs), + DistributedTracingPolicy(**kwargs), + HttpLoggingPolicy(**kwargs), + ] + + # print(asyncio.get_event_loop()) + database_account = self._global_endpoint_manager._GetDatabaseAccount(**kwargs) + self._global_endpoint_manager.force_refresh(database_account) + + transport = kwargs.pop("transport", None) + self.pipeline_client = AsyncPipelineClient(base_url=url_connection, transport=transport, policies=policies) + + # Query compatibility mode. + # Allows to specify compatibility mode used by client when making query requests. Should be removed when + # application/sql is no longer supported. + self._query_compatibility_mode = CosmosClientConnection._QueryCompatibilityMode.Default + + # Routing map provider + self._routing_map_provider = routing_map_provider.SmartRoutingMapProvider(self) + + @property + def Session(self): + """Gets the session object from the client. """ + return self.session + + @Session.setter + def Session(self, session): + """Sets a session object on the document client. 
+ + This will override the existing session + """ + self.session = session + + @property + def WriteEndpoint(self): + """Gets the curent write endpoint for a geo-replicated database account. + """ + return self._global_endpoint_manager.get_write_endpoint() + + @property + def ReadEndpoint(self): + """Gets the curent read endpoint for a geo-replicated database account. + """ + return self._global_endpoint_manager.get_read_endpoint() + + def RegisterPartitionResolver(self, database_link, partition_resolver): + """Registers the partition resolver associated with the database link + + :param str database_link: + Database Self Link or ID based link. + :param object partition_resolver: + An instance of PartitionResolver. + + """ + if not database_link: + raise ValueError("database_link is None or empty.") + + if partition_resolver is None: + raise ValueError("partition_resolver is None.") + + self.partition_resolvers = {base.TrimBeginningAndEndingSlashes(database_link): partition_resolver} + + def GetPartitionResolver(self, database_link): + """Gets the partition resolver associated with the database link + + :param str database_link: + Database self link or ID based link. + + :return: + An instance of PartitionResolver. + :rtype: object + + """ + if not database_link: + raise ValueError("database_link is None or empty.") + + return self.partition_resolvers.get(base.TrimBeginningAndEndingSlashes(database_link)) + + async def CreateDatabase(self, database, options=None, **kwargs): + """Creates a database. + + :param dict database: + The Azure Cosmos database to create. + :param dict options: + The request options for the request. + + :return: + The Database that was created. + :rtype: dict + + """ + if options is None: + options = {} + + CosmosClientConnection.__ValidateResource(database) + path = "/dbs" + return await self.Create(database, path, "dbs", None, None, options, **kwargs) + + def ReadDatabase(self, database_link, options=None, **kwargs): + """Reads a database. + + :param str database_link: + The link to the database. + :param dict options: + The request options for the request. + + :return: + The Database that was read. + :rtype: dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(database_link) + database_id = base.GetResourceIdOrFullNameFromLink(database_link) + return self.Read(path, "dbs", database_id, None, options, **kwargs) + + def ReadDatabases(self, options=None, **kwargs): + """Reads all databases. + + :param dict options: + The request options for the request. + + :return: + Query Iterable of Databases. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + return self.QueryDatabases(None, options, **kwargs) + + def QueryDatabases(self, query, options=None, **kwargs): + """Queries databases. + + :param (str or dict) query: + :param dict options: + The request options for the request. + + :return: Query Iterable of Databases. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + def fetch_fn(options): + return ( + self.__QueryFeed( + "/dbs", "dbs", "", lambda r: r["Databases"], + lambda _, b: b, query, options, **kwargs + ), + self.last_response_headers, + ) + + return AsyncItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) + + def ReadContainers(self, database_link, options=None, **kwargs): + """Reads all collections in a database. + + :param str database_link: + The link to the database. 
+ :param dict options: + The request options for the request. + + :return: Query Iterable of Collections. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + return self.QueryContainers(database_link, None, options, **kwargs) + + def QueryContainers(self, database_link, query, options=None, **kwargs): + """Queries collections in a database. + + :param str database_link: + The link to the database. + :param (str or dict) query: + :param dict options: + The request options for the request. + + :return: Query Iterable of Collections. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(database_link, "colls") + database_id = base.GetResourceIdOrFullNameFromLink(database_link) + + def fetch_fn(options): + return ( + self.__QueryFeed( + path, "colls", database_id, lambda r: r["DocumentCollections"], + lambda _, body: body, query, options, **kwargs + ), + self.last_response_headers, + ) + + return AsyncItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) + + def CreateContainer(self, database_link, collection, options=None, **kwargs): + """Creates a collection in a database. + + :param str database_link: + The link to the database. + :param dict collection: + The Azure Cosmos collection to create. + :param dict options: + The request options for the request. + + :return: The Collection that was created. + :rtype: dict + + """ + if options is None: + options = {} + + CosmosClientConnection.__ValidateResource(collection) + path = base.GetPathFromLink(database_link, "colls") + database_id = base.GetResourceIdOrFullNameFromLink(database_link) + return self.Create(collection, path, "colls", database_id, None, options, **kwargs) + + def ReplaceContainer(self, collection_link, collection, options=None, **kwargs): + """Replaces a collection and return it. + + :param str collection_link: + The link to the collection entity. + :param dict collection: + The collection to be used. + :param dict options: + The request options for the request. + + :return: + The new Collection. + :rtype: + dict + + """ + if options is None: + options = {} + + CosmosClientConnection.__ValidateResource(collection) + path = base.GetPathFromLink(collection_link) + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + return self.Replace(collection, path, "colls", collection_id, None, options, **kwargs) + + def ReadContainer(self, collection_link, options=None, **kwargs): + """Reads a collection. + + :param str collection_link: + The link to the document collection. + :param dict options: + The request options for the request. + + :return: + The read Collection. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(collection_link) + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + return self.Read(path, "colls", collection_id, None, options, **kwargs) + + def CreateUser(self, database_link, user, options=None, **kwargs): + """Creates a user. + + :param str database_link: + The link to the database. + :param dict user: + The Azure Cosmos user to create. + :param dict options: + The request options for the request. + + :return: + The created User. 
+ :rtype: + dict + + """ + if options is None: + options = {} + + database_id, path = self._GetDatabaseIdWithPathForUser(database_link, user) + return self.Create(user, path, "users", database_id, None, options, **kwargs) + + def UpsertUser(self, database_link, user, options=None, **kwargs): + """Upserts a user. + + :param str database_link: + The link to the database. + :param dict user: + The Azure Cosmos user to upsert. + :param dict options: + The request options for the request. + + :return: + The upserted User. + :rtype: dict + """ + if options is None: + options = {} + + database_id, path = self._GetDatabaseIdWithPathForUser(database_link, user) + return self.Upsert(user, path, "users", database_id, None, options, **kwargs) + + def _GetDatabaseIdWithPathForUser(self, database_link, user): # pylint: disable=no-self-use + CosmosClientConnection.__ValidateResource(user) + path = base.GetPathFromLink(database_link, "users") + database_id = base.GetResourceIdOrFullNameFromLink(database_link) + return database_id, path + + def ReadUser(self, user_link, options=None, **kwargs): + """Reads a user. + + :param str user_link: + The link to the user entity. + :param dict options: + The request options for the request. + + :return: + The read User. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(user_link) + user_id = base.GetResourceIdOrFullNameFromLink(user_link) + return self.Read(path, "users", user_id, None, options, **kwargs) + + def ReadUsers(self, database_link, options=None, **kwargs): + """Reads all users in a database. + + :params str database_link: + The link to the database. + :params dict options: + The request options for the request. + :return: + Query iterable of Users. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + return self.QueryUsers(database_link, None, options, **kwargs) + + def QueryUsers(self, database_link, query, options=None, **kwargs): + """Queries users in a database. + + :param str database_link: + The link to the database. + :param (str or dict) query: + :param dict options: + The request options for the request. + + :return: + Query Iterable of Users. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(database_link, "users") + database_id = base.GetResourceIdOrFullNameFromLink(database_link) + + def fetch_fn(options): + return ( + self.__QueryFeed( + path, "users", database_id, lambda r: r["Users"], + lambda _, b: b, query, options, **kwargs + ), + self.last_response_headers, + ) + + return AsyncItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) + + def DeleteDatabase(self, database_link, options=None, **kwargs): + """Deletes a database. + + :param str database_link: + The link to the database. + :param dict options: + The request options for the request. + + :return: + The deleted Database. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(database_link) + database_id = base.GetResourceIdOrFullNameFromLink(database_link) + return self.DeleteResource(path, "dbs", database_id, None, options, **kwargs) + + def CreatePermission(self, user_link, permission, options=None, **kwargs): + """Creates a permission for a user. + + :param str user_link: + The link to the user entity. + :param dict permission: + The Azure Cosmos user permission to create. + :param dict options: + The request options for the request. 
+ + :return: + The created Permission. + :rtype: + dict + + """ + if options is None: + options = {} + + path, user_id = self._GetUserIdWithPathForPermission(permission, user_link) + return self.Create(permission, path, "permissions", user_id, None, options, **kwargs) + + def UpsertPermission(self, user_link, permission, options=None, **kwargs): + """Upserts a permission for a user. + + :param str user_link: + The link to the user entity. + :param dict permission: + The Azure Cosmos user permission to upsert. + :param dict options: + The request options for the request. + + :return: + The upserted permission. + :rtype: + dict + + """ + if options is None: + options = {} + + path, user_id = self._GetUserIdWithPathForPermission(permission, user_link) + return self.Upsert(permission, path, "permissions", user_id, None, options, **kwargs) + + def _GetUserIdWithPathForPermission(self, permission, user_link): # pylint: disable=no-self-use + CosmosClientConnection.__ValidateResource(permission) + path = base.GetPathFromLink(user_link, "permissions") + user_id = base.GetResourceIdOrFullNameFromLink(user_link) + return path, user_id + + def ReadPermission(self, permission_link, options=None, **kwargs): + """Reads a permission. + + :param str permission_link: + The link to the permission. + :param dict options: + The request options for the request. + + :return: + The read permission. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(permission_link) + permission_id = base.GetResourceIdOrFullNameFromLink(permission_link) + return self.Read(path, "permissions", permission_id, None, options, **kwargs) + + def ReadPermissions(self, user_link, options=None, **kwargs): + """Reads all permissions for a user. + + :param str user_link: + The link to the user entity. + :param dict options: + The request options for the request. + + :return: + Query Iterable of Permissions. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + return self.QueryPermissions(user_link, None, options, **kwargs) + + def QueryPermissions(self, user_link, query, options=None, **kwargs): + """Queries permissions for a user. + + :param str user_link: + The link to the user entity. + :param (str or dict) query: + :param dict options: + The request options for the request. + + :return: + Query Iterable of Permissions. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(user_link, "permissions") + user_id = base.GetResourceIdOrFullNameFromLink(user_link) + + def fetch_fn(options): + return ( + self.__QueryFeed( + path, "permissions", user_id, lambda r: r["Permissions"], lambda _, b: b, query, options, **kwargs + ), + self.last_response_headers, + ) + + return AsyncItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) + + def ReplaceUser(self, user_link, user, options=None, **kwargs): + """Replaces a user and return it. + + :param str user_link: + The link to the user entity. + :param dict user: + :param dict options: + The request options for the request. + + :return: + The new User. 
+ :rtype: + dict + + """ + if options is None: + options = {} + + CosmosClientConnection.__ValidateResource(user) + path = base.GetPathFromLink(user_link) + user_id = base.GetResourceIdOrFullNameFromLink(user_link) + return self.Replace(user, path, "users", user_id, None, options, **kwargs) + + def DeleteUser(self, user_link, options=None, **kwargs): + """Deletes a user. + + :param str user_link: + The link to the user entity. + :param dict options: + The request options for the request. + + :return: + The deleted user. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(user_link) + user_id = base.GetResourceIdOrFullNameFromLink(user_link) + return self.DeleteResource(path, "users", user_id, None, options, **kwargs) + + def ReplacePermission(self, permission_link, permission, options=None, **kwargs): + """Replaces a permission and return it. + + :param str permission_link: + The link to the permission. + :param dict permission: + :param dict options: + The request options for the request. + + :return: + The new Permission. + :rtype: + dict + + """ + if options is None: + options = {} + + CosmosClientConnection.__ValidateResource(permission) + path = base.GetPathFromLink(permission_link) + permission_id = base.GetResourceIdOrFullNameFromLink(permission_link) + return self.Replace(permission, path, "permissions", permission_id, None, options, **kwargs) + + def DeletePermission(self, permission_link, options=None, **kwargs): + """Deletes a permission. + + :param str permission_link: + The link to the permission. + :param dict options: + The request options for the request. + + :return: + The deleted Permission. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(permission_link) + permission_id = base.GetResourceIdOrFullNameFromLink(permission_link) + return self.DeleteResource(path, "permissions", permission_id, None, options, **kwargs) + + def ReadItems(self, collection_link, feed_options=None, response_hook=None, **kwargs): + """Reads all documents in a collection. + + :param str collection_link: + The link to the document collection. + :param dict feed_options: + + :return: + Query Iterable of Documents. + :rtype: + query_iterable.QueryIterable + + """ + if feed_options is None: + feed_options = {} + + return self.QueryItems(collection_link, None, feed_options, response_hook=response_hook, **kwargs) + + def QueryItems( + self, + database_or_container_link, + query, + options=None, + partition_key=None, + response_hook=None, + **kwargs + ): + """Queries documents in a collection. + + :param str database_or_container_link: + The link to the database when using partitioning, otherwise link to the document collection. + :param (str or dict) query: + :param dict options: + The request options for the request. + :param str partition_key: + Partition key for the query(default value None) + :param response_hook: + A callable invoked with the response metadata + + :return: + Query Iterable of Documents. 
+ :rtype: + query_iterable.QueryIterable + + """ + database_or_container_link = base.TrimBeginningAndEndingSlashes(database_or_container_link) + + if options is None: + options = {} + + if base.IsDatabaseLink(database_or_container_link): + return AsyncItemPaged( + self, + query, + options, + database_link=database_or_container_link, + partition_key=partition_key, + page_iterator_class=query_iterable.QueryIterable + ) + + path = base.GetPathFromLink(database_or_container_link, "docs") + collection_id = base.GetResourceIdOrFullNameFromLink(database_or_container_link) + + def fetch_fn(options): + return ( + self.__QueryFeed( + path, + "docs", + collection_id, + lambda r: r["Documents"], + lambda _, b: b, + query, + options, + response_hook=response_hook, + **kwargs + ), + self.last_response_headers, + ) + + return AsyncItemPaged( + self, + query, + options, + fetch_function=fetch_fn, + collection_link=database_or_container_link, + page_iterator_class=query_iterable.QueryIterable + ) + + def QueryItemsChangeFeed(self, collection_link, options=None, response_hook=None, **kwargs): + """Queries documents change feed in a collection. + + :param str collection_link: + The link to the document collection. + :param dict options: + The request options for the request. + options may also specify partition key range id. + :param response_hook: + A callable invoked with the response metadata + + :return: + Query Iterable of Documents. + :rtype: + query_iterable.QueryIterable + + """ + + partition_key_range_id = None + if options is not None and "partitionKeyRangeId" in options: + partition_key_range_id = options["partitionKeyRangeId"] + + return self._QueryChangeFeed( + collection_link, "Documents", options, partition_key_range_id, response_hook=response_hook, **kwargs + ) + + def _QueryChangeFeed( + self, collection_link, resource_type, options=None, partition_key_range_id=None, response_hook=None, **kwargs + ): + """Queries change feed of a resource in a collection. + + :param str collection_link: + The link to the document collection. + :param str resource_type: + The type of the resource. + :param dict options: + The request options for the request. + :param str partition_key_range_id: + Specifies partition key range id. + :param response_hook: + A callable invoked with the response metadata + + :return: + Query Iterable of Documents. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + options["changeFeed"] = True + + resource_key_map = {"Documents": "docs"} + + # For now, change feed only supports Documents and Partition Key Range resouce type + if resource_type not in resource_key_map: + raise NotImplementedError(resource_type + " change feed query is not supported.") + + resource_key = resource_key_map[resource_type] + path = base.GetPathFromLink(collection_link, resource_key) + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + + def fetch_fn(options): + return ( + self.__QueryFeed( + path, + resource_key, + collection_id, + lambda r: r[resource_type], + lambda _, b: b, + None, + options, + partition_key_range_id, + response_hook=response_hook, + **kwargs + ), + self.last_response_headers, + ) + + return AsyncItemPaged( + self, + None, + options, + fetch_function=fetch_fn, + collection_link=collection_link, + page_iterator_class=query_iterable.QueryIterable + ) + + def _ReadPartitionKeyRanges(self, collection_link, feed_options=None, **kwargs): + """Reads Partition Key Ranges. 
+ + :param str collection_link: + The link to the document collection. + :param dict feed_options: + + :return: + Query Iterable of PartitionKeyRanges. + :rtype: + query_iterable.QueryIterable + + """ + if feed_options is None: + feed_options = {} + + return self._QueryPartitionKeyRanges(collection_link, None, feed_options, **kwargs) + + def _QueryPartitionKeyRanges(self, collection_link, query, options=None, **kwargs): + """Queries Partition Key Ranges in a collection. + + :param str collection_link: + The link to the document collection. + :param (str or dict) query: + :param dict options: + The request options for the request. + + :return: + Query Iterable of PartitionKeyRanges. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(collection_link, "pkranges") + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + + def fetch_fn(options): + return ( + self.__QueryFeed( + path, "pkranges", collection_id, lambda r: r["PartitionKeyRanges"], + lambda _, b: b, query, options, **kwargs + ), + self.last_response_headers, + ) + + return AsyncItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) + + def CreateItem(self, database_or_container_link, document, options=None, **kwargs): + """Creates a document in a collection. + + :param str database_or_container_link: + The link to the database when using partitioning, otherwise link to the document collection. + :param dict document: + The Azure Cosmos document to create. + :param dict options: + The request options for the request. + :param bool options['disableAutomaticIdGeneration']: + Disables the automatic id generation. If id is missing in the body and this + option is true, an error will be returned. + + :return: + The created Document. + :rtype: + dict + + """ + # Python's default arguments are evaluated once when the function is defined, + # not each time the function is called (like it is in say, Ruby). This means + # that if you use a mutable default argument and mutate it, you will and have + # mutated that object for all future calls to the function as well. So, using + # a non-mutable default in this case(None) and assigning an empty dict(mutable) + # inside the method For more details on this gotcha, please refer + # http://docs.python-guide.org/en/latest/writing/gotchas/ + if options is None: + options = {} + + # We check the link to be document collection link since it can be database + # link in case of client side partitioning + collection_id, document, path = self._GetContainerIdWithPathForItem( + database_or_container_link, document, options + ) + + if base.IsItemContainerLink(database_or_container_link): + options = self._AddPartitionKey(database_or_container_link, document, options) + + return self.Create(document, path, "docs", collection_id, None, options, **kwargs) + + def UpsertItem(self, database_or_container_link, document, options=None, **kwargs): + """Upserts a document in a collection. + + :param str database_or_container_link: + The link to the database when using partitioning, otherwise link to the document collection. + :param dict document: + The Azure Cosmos document to upsert. + :param dict options: + The request options for the request. + :param bool options['disableAutomaticIdGeneration']: + Disables the automatic id generation. If id is missing in the body and this + option is true, an error will be returned. + + :return: + The upserted Document. 
+ :rtype: + dict + + """ + # Python's default arguments are evaluated once when the function is defined, + # not each time the function is called (like it is in say, Ruby). This means + # that if you use a mutable default argument and mutate it, you will and have + # mutated that object for all future calls to the function as well. So, using + # a non-mutable deafult in this case(None) and assigning an empty dict(mutable) + # inside the method For more details on this gotcha, please refer + # http://docs.python-guide.org/en/latest/writing/gotchas/ + if options is None: + options = {} + + # We check the link to be document collection link since it can be database + # link in case of client side partitioning + if base.IsItemContainerLink(database_or_container_link): + options = self._AddPartitionKey(database_or_container_link, document, options) + + collection_id, document, path = self._GetContainerIdWithPathForItem( + database_or_container_link, document, options + ) + return self.Upsert(document, path, "docs", collection_id, None, options, **kwargs) + + PartitionResolverErrorMessage = ( + "Couldn't find any partition resolvers for the database link provided. " + + "Ensure that the link you used when registering the partition resolvers " + + "matches the link provided or you need to register both types of database " + + "link(self link as well as ID based link)." + ) + + # Gets the collection id and path for the document + def _GetContainerIdWithPathForItem(self, database_or_container_link, document, options): + + if not database_or_container_link: + raise ValueError("database_or_container_link is None or empty.") + + if document is None: + raise ValueError("document is None.") + + CosmosClientConnection.__ValidateResource(document) + document = document.copy() + if not document.get("id") and not options.get("disableAutomaticIdGeneration"): + document["id"] = base.GenerateGuidId() + + collection_link = database_or_container_link + + if base.IsDatabaseLink(database_or_container_link): + partition_resolver = self.GetPartitionResolver(database_or_container_link) + + if partition_resolver is not None: + collection_link = partition_resolver.ResolveForCreate(document) + else: + raise ValueError(CosmosClientConnection.PartitionResolverErrorMessage) + + path = base.GetPathFromLink(collection_link, "docs") + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + return collection_id, document, path + + def ReadItem(self, document_link, options=None, **kwargs): + """Reads a document. + + :param str document_link: + The link to the document. + :param dict options: + The request options for the request. + + :return: + The read Document. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(document_link) + document_id = base.GetResourceIdOrFullNameFromLink(document_link) + return self.Read(path, "docs", document_id, None, options, **kwargs) + + async def ReadItemAsync(self, document_link, options=None, **kwargs): + """Reads a document. + + :param str document_link: + The link to the document. + :param dict options: + The request options for the request. + + :return: + The read Document. 
+ :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(document_link) + document_id = base.GetResourceIdOrFullNameFromLink(document_link) + return await self.ReadAsync(path, "docs", document_id, None, options, **kwargs) + + def ReadTriggers(self, collection_link, options=None, **kwargs): + """Reads all triggers in a collection. + + :param str collection_link: + The link to the document collection. + :param dict options: + The request options for the request. + + :return: + Query Iterable of Triggers. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + return self.QueryTriggers(collection_link, None, options, **kwargs) + + def QueryTriggers(self, collection_link, query, options=None, **kwargs): + """Queries triggers in a collection. + + :param str collection_link: + The link to the document collection. + :param (str or dict) query: + :param dict options: + The request options for the request. + + :return: + Query Iterable of Triggers. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(collection_link, "triggers") + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + + def fetch_fn(options): + return ( + self.__QueryFeed( + path, "triggers", collection_id, lambda r: r["Triggers"], lambda _, b: b, query, options, **kwargs + ), + self.last_response_headers, + ) + + return AsyncItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) + + def CreateTrigger(self, collection_link, trigger, options=None, **kwargs): + """Creates a trigger in a collection. + + :param str collection_link: + The link to the document collection. + :param dict trigger: + :param dict options: + The request options for the request. + + :return: + The created Trigger. + :rtype: + dict + + """ + if options is None: + options = {} + + collection_id, path, trigger = self._GetContainerIdWithPathForTrigger(collection_link, trigger) + return self.Create(trigger, path, "triggers", collection_id, None, options, **kwargs) + + def UpsertTrigger(self, collection_link, trigger, options=None, **kwargs): + """Upserts a trigger in a collection. + + :param str collection_link: + The link to the document collection. + :param dict trigger: + :param dict options: + The request options for the request. + + :return: + The upserted Trigger. + :rtype: + dict + + """ + if options is None: + options = {} + + collection_id, path, trigger = self._GetContainerIdWithPathForTrigger(collection_link, trigger) + return self.Upsert(trigger, path, "triggers", collection_id, None, options, **kwargs) + + def _GetContainerIdWithPathForTrigger(self, collection_link, trigger): # pylint: disable=no-self-use + CosmosClientConnection.__ValidateResource(trigger) + trigger = trigger.copy() + if trigger.get("serverScript"): + trigger["body"] = str(trigger.pop("serverScript", "")) + elif trigger.get("body"): + trigger["body"] = str(trigger["body"]) + + path = base.GetPathFromLink(collection_link, "triggers") + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + return collection_id, path, trigger + + def ReadTrigger(self, trigger_link, options=None, **kwargs): + """Reads a trigger. + + :param str trigger_link: + The link to the trigger. + :param dict options: + The request options for the request. + + :return: + The read Trigger. 
+ :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(trigger_link) + trigger_id = base.GetResourceIdOrFullNameFromLink(trigger_link) + return self.Read(path, "triggers", trigger_id, None, options, **kwargs) + + def ReadUserDefinedFunctions(self, collection_link, options=None, **kwargs): + """Reads all user-defined functions in a collection. + + :param str collection_link: + The link to the document collection. + :param dict options: + The request options for the request. + + :return: + Query Iterable of UDFs. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + return self.QueryUserDefinedFunctions(collection_link, None, options, **kwargs) + + def QueryUserDefinedFunctions(self, collection_link, query, options=None, **kwargs): + """Queries user-defined functions in a collection. + + :param str collection_link: + The link to the collection. + :param (str or dict) query: + :param dict options: + The request options for the request. + + :return: + Query Iterable of UDFs. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(collection_link, "udfs") + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + + def fetch_fn(options): + return ( + self.__QueryFeed( + path, "udfs", collection_id, lambda r: r["UserDefinedFunctions"], + lambda _, b: b, query, options, **kwargs + ), + self.last_response_headers, + ) + + return AsyncItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) + + def CreateUserDefinedFunction(self, collection_link, udf, options=None, **kwargs): + """Creates a user-defined function in a collection. + + :param str collection_link: + The link to the collection. + :param str udf: + :param dict options: + The request options for the request. + + :return: + The created UDF. + :rtype: + dict + + """ + if options is None: + options = {} + + collection_id, path, udf = self._GetContainerIdWithPathForUDF(collection_link, udf) + return self.Create(udf, path, "udfs", collection_id, None, options, **kwargs) + + def UpsertUserDefinedFunction(self, collection_link, udf, options=None, **kwargs): + """Upserts a user-defined function in a collection. + + :param str collection_link: + The link to the collection. + :param str udf: + :param dict options: + The request options for the request. + + :return: + The upserted UDF. + :rtype: + dict + + """ + if options is None: + options = {} + + collection_id, path, udf = self._GetContainerIdWithPathForUDF(collection_link, udf) + return self.Upsert(udf, path, "udfs", collection_id, None, options, **kwargs) + + def _GetContainerIdWithPathForUDF(self, collection_link, udf): # pylint: disable=no-self-use + CosmosClientConnection.__ValidateResource(udf) + udf = udf.copy() + if udf.get("serverScript"): + udf["body"] = str(udf.pop("serverScript", "")) + elif udf.get("body"): + udf["body"] = str(udf["body"]) + + path = base.GetPathFromLink(collection_link, "udfs") + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + return collection_id, path, udf + + def ReadUserDefinedFunction(self, udf_link, options=None, **kwargs): + """Reads a user-defined function. + + :param str udf_link: + The link to the user-defined function. + :param dict options: + The request options for the request. + + :return: + The read UDF. 
+ :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(udf_link) + udf_id = base.GetResourceIdOrFullNameFromLink(udf_link) + return self.Read(path, "udfs", udf_id, None, options, **kwargs) + + def ReadStoredProcedures(self, collection_link, options=None, **kwargs): + """Reads all store procedures in a collection. + + :param str collection_link: + The link to the document collection. + :param dict options: + The request options for the request. + + :return: + Query Iterable of Stored Procedures. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + return self.QueryStoredProcedures(collection_link, None, options, **kwargs) + + def QueryStoredProcedures(self, collection_link, query, options=None, **kwargs): + """Queries stored procedures in a collection. + + :param str collection_link: + The link to the document collection. + :param (str or dict) query: + :param dict options: + The request options for the request. + + :return: + Query Iterable of Stored Procedures. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(collection_link, "sprocs") + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + + def fetch_fn(options): + return ( + self.__QueryFeed( + path, "sprocs", collection_id, lambda r: r["StoredProcedures"], + lambda _, b: b, query, options, **kwargs + ), + self.last_response_headers, + ) + + return AsyncItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) + + def CreateStoredProcedure(self, collection_link, sproc, options=None, **kwargs): + """Creates a stored procedure in a collection. + + :param str collection_link: + The link to the document collection. + :param str sproc: + :param dict options: + The request options for the request. + + :return: + The created Stored Procedure. + :rtype: + dict + + """ + if options is None: + options = {} + + collection_id, path, sproc = self._GetContainerIdWithPathForSproc(collection_link, sproc) + return self.Create(sproc, path, "sprocs", collection_id, None, options, **kwargs) + + def UpsertStoredProcedure(self, collection_link, sproc, options=None, **kwargs): + """Upserts a stored procedure in a collection. + + :param str collection_link: + The link to the document collection. + :param str sproc: + :param dict options: + The request options for the request. + + :return: + The upserted Stored Procedure. + :rtype: + dict + + """ + if options is None: + options = {} + + collection_id, path, sproc = self._GetContainerIdWithPathForSproc(collection_link, sproc) + return self.Upsert(sproc, path, "sprocs", collection_id, None, options, **kwargs) + + def _GetContainerIdWithPathForSproc(self, collection_link, sproc): # pylint: disable=no-self-use + CosmosClientConnection.__ValidateResource(sproc) + sproc = sproc.copy() + if sproc.get("serverScript"): + sproc["body"] = str(sproc.pop("serverScript", "")) + elif sproc.get("body"): + sproc["body"] = str(sproc["body"]) + path = base.GetPathFromLink(collection_link, "sprocs") + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + return collection_id, path, sproc + + def ReadStoredProcedure(self, sproc_link, options=None, **kwargs): + """Reads a stored procedure. + + :param str sproc_link: + The link to the stored procedure. + :param dict options: + The request options for the request. + + :return: + The read Stored Procedure. 
+ :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(sproc_link) + sproc_id = base.GetResourceIdOrFullNameFromLink(sproc_link) + return self.Read(path, "sprocs", sproc_id, None, options, **kwargs) + + def ReadConflicts(self, collection_link, feed_options=None, **kwargs): + """Reads conflicts. + + :param str collection_link: + The link to the document collection. + :param dict feed_options: + + :return: + Query Iterable of Conflicts. + :rtype: + query_iterable.QueryIterable + + """ + if feed_options is None: + feed_options = {} + + return self.QueryConflicts(collection_link, None, feed_options, **kwargs) + + def QueryConflicts(self, collection_link, query, options=None, **kwargs): + """Queries conflicts in a collection. + + :param str collection_link: + The link to the document collection. + :param (str or dict) query: + :param dict options: + The request options for the request. + + :return: + Query Iterable of Conflicts. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(collection_link, "conflicts") + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + + def fetch_fn(options): + return ( + self.__QueryFeed( + path, "conflicts", collection_id, lambda r: r["Conflicts"], + lambda _, b: b, query, options, **kwargs + ), + self.last_response_headers, + ) + + return AsyncItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) + + def ReadConflict(self, conflict_link, options=None, **kwargs): + """Reads a conflict. + + :param str conflict_link: + The link to the conflict. + :param dict options: + + :return: + The read Conflict. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(conflict_link) + conflict_id = base.GetResourceIdOrFullNameFromLink(conflict_link) + return self.Read(path, "conflicts", conflict_id, None, options, **kwargs) + + def DeleteContainer(self, collection_link, options=None, **kwargs): + """Deletes a collection. + + :param str collection_link: + The link to the document collection. + :param dict options: + The request options for the request. + + :return: + The deleted Collection. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(collection_link) + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + return self.DeleteResource(path, "colls", collection_id, None, options, **kwargs) + + def ReplaceItem(self, document_link, new_document, options=None, **kwargs): + """Replaces a document and returns it. + + :param str document_link: + The link to the document. + :param dict new_document: + :param dict options: + The request options for the request. + + :return: + The new Document. + :rtype: + dict + + """ + CosmosClientConnection.__ValidateResource(new_document) + path = base.GetPathFromLink(document_link) + document_id = base.GetResourceIdOrFullNameFromLink(document_link) + + # Python's default arguments are evaluated once when the function is defined, + # not each time the function is called (like it is in say, Ruby). This means + # that if you use a mutable default argument and mutate it, you will and have + # mutated that object for all future calls to the function as well. 
So, using + # a non-mutable deafult in this case(None) and assigning an empty dict(mutable) + # inside the function so that it remains local For more details on this gotcha, + # please refer http://docs.python-guide.org/en/latest/writing/gotchas/ + if options is None: + options = {} + + # Extract the document collection link and add the partition key to options + collection_link = base.GetItemContainerLink(document_link) + options = self._AddPartitionKey(collection_link, new_document, options) + + return self.Replace(new_document, path, "docs", document_id, None, options, **kwargs) + + def DeleteItem(self, document_link, options=None, **kwargs): + """Deletes a document. + + :param str document_link: + The link to the document. + :param dict options: + The request options for the request. + + :return: + The deleted Document. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(document_link) + document_id = base.GetResourceIdOrFullNameFromLink(document_link) + return self.DeleteResource(path, "docs", document_id, None, options, **kwargs) + + def ReplaceTrigger(self, trigger_link, trigger, options=None, **kwargs): + """Replaces a trigger and returns it. + + :param str trigger_link: + The link to the trigger. + :param dict trigger: + :param dict options: + The request options for the request. + + :return: + The replaced Trigger. + :rtype: + dict + + """ + if options is None: + options = {} + + CosmosClientConnection.__ValidateResource(trigger) + trigger = trigger.copy() + if trigger.get("serverScript"): + trigger["body"] = str(trigger["serverScript"]) + elif trigger.get("body"): + trigger["body"] = str(trigger["body"]) + + path = base.GetPathFromLink(trigger_link) + trigger_id = base.GetResourceIdOrFullNameFromLink(trigger_link) + return self.Replace(trigger, path, "triggers", trigger_id, None, options, **kwargs) + + def DeleteTrigger(self, trigger_link, options=None, **kwargs): + """Deletes a trigger. + + :param str trigger_link: + The link to the trigger. + :param dict options: + The request options for the request. + + :return: + The deleted Trigger. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(trigger_link) + trigger_id = base.GetResourceIdOrFullNameFromLink(trigger_link) + return self.DeleteResource(path, "triggers", trigger_id, None, options, **kwargs) + + def ReplaceUserDefinedFunction(self, udf_link, udf, options=None, **kwargs): + """Replaces a user-defined function and returns it. + + :param str udf_link: + The link to the user-defined function. + :param dict udf: + :param dict options: + The request options for the request. + + :return: + The new UDF. + :rtype: + dict + + """ + if options is None: + options = {} + + CosmosClientConnection.__ValidateResource(udf) + udf = udf.copy() + if udf.get("serverScript"): + udf["body"] = str(udf["serverScript"]) + elif udf.get("body"): + udf["body"] = str(udf["body"]) + + path = base.GetPathFromLink(udf_link) + udf_id = base.GetResourceIdOrFullNameFromLink(udf_link) + return self.Replace(udf, path, "udfs", udf_id, None, options, **kwargs) + + def DeleteUserDefinedFunction(self, udf_link, options=None, **kwargs): + """Deletes a user-defined function. + + :param str udf_link: + The link to the user-defined function. + :param dict options: + The request options for the request. + + :return: + The deleted UDF. 
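[Illustrative sketch, not part of this patch] The mutable-default-argument gotcha referenced in the ReplaceItem comment above is ordinary Python behavior, not anything Cosmos-specific. A two-function illustration with hypothetical names makes the problem, and the `options=None` idiom used throughout this file, concrete.

    def broken(key, options={}):        # the dict is created once, at definition time
        options[key] = True
        return options

    broken("a")    # {'a': True}
    broken("b")    # {'a': True, 'b': True}  <- state leaked from the first call

    def fixed(key, options=None):       # the idiom used throughout this client
        if options is None:
            options = {}                # a fresh dict on every call
        options[key] = True
        return options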
+ :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(udf_link) + udf_id = base.GetResourceIdOrFullNameFromLink(udf_link) + return self.DeleteResource(path, "udfs", udf_id, None, options, **kwargs) + + def ExecuteStoredProcedure(self, sproc_link, params, options=None, **kwargs): + """Executes a store procedure. + + :param str sproc_link: + The link to the stored procedure. + :param dict params: + List or None + :param dict options: + The request options for the request. + + :return: + The Stored Procedure response. + :rtype: + dict + + """ + if options is None: + options = {} + + initial_headers = dict(self.default_headers) + initial_headers.update({http_constants.HttpHeaders.Accept: (runtime_constants.MediaTypes.Json)}) + + if params and not isinstance(params, list): + params = [params] + + path = base.GetPathFromLink(sproc_link) + sproc_id = base.GetResourceIdOrFullNameFromLink(sproc_link) + headers = base.GetHeaders(self, initial_headers, "post", path, sproc_id, "sprocs", options) + + # ExecuteStoredProcedure will use WriteEndpoint since it uses POST operation + request_params = _request_object.RequestObject("sprocs", documents._OperationType.ExecuteJavaScript) + result, self.last_response_headers = self.__Post(path, request_params, params, headers, **kwargs) + return result + + def ReplaceStoredProcedure(self, sproc_link, sproc, options=None, **kwargs): + """Replaces a stored procedure and returns it. + + :param str sproc_link: + The link to the stored procedure. + :param dict sproc: + :param dict options: + The request options for the request. + + :return: + The replaced Stored Procedure. + :rtype: + dict + + """ + if options is None: + options = {} + + CosmosClientConnection.__ValidateResource(sproc) + sproc = sproc.copy() + if sproc.get("serverScript"): + sproc["body"] = str(sproc["serverScript"]) + elif sproc.get("body"): + sproc["body"] = str(sproc["body"]) + + path = base.GetPathFromLink(sproc_link) + sproc_id = base.GetResourceIdOrFullNameFromLink(sproc_link) + return self.Replace(sproc, path, "sprocs", sproc_id, None, options, **kwargs) + + def DeleteStoredProcedure(self, sproc_link, options=None, **kwargs): + """Deletes a stored procedure. + + :param str sproc_link: + The link to the stored procedure. + :param dict options: + The request options for the request. + + :return: + The deleted Stored Procedure. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(sproc_link) + sproc_id = base.GetResourceIdOrFullNameFromLink(sproc_link) + return self.DeleteResource(path, "sprocs", sproc_id, None, options, **kwargs) + + def DeleteConflict(self, conflict_link, options=None, **kwargs): + """Deletes a conflict. + + :param str conflict_link: + The link to the conflict. + :param dict options: + The request options for the request. + + :return: + The deleted Conflict. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(conflict_link) + conflict_id = base.GetResourceIdOrFullNameFromLink(conflict_link) + return self.DeleteResource(path, "conflicts", conflict_id, None, options, **kwargs) + + def ReplaceOffer(self, offer_link, offer, **kwargs): + """Replaces an offer and returns it. + + :param str offer_link: + The link to the offer. + :param dict offer: + + :return: + The replaced Offer. 
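[Illustrative sketch, not part of this patch] ExecuteStoredProcedure above wraps a non-list `params` value in a list, builds POST headers, and targets the write endpoint. A hedged usage sketch follows; the link, the argument, and the `partitionKey` option value are assumptions made for illustration, reusing the options-dict pattern seen elsewhere in this file.

    # Hypothetical: execute the stored procedure against a single logical partition.
    sproc_link = "dbs/mydb/colls/mycoll/sprocs/helloSproc"
    result = conn.ExecuteStoredProcedure(
        sproc_link,
        params="some argument",                  # a bare value is wrapped into ["some argument"]
        options={"partitionKey": "partition1"},  # assumed option key, per the options pattern above
    )
    print(result)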
+ :rtype: + dict + + """ + CosmosClientConnection.__ValidateResource(offer) + path = base.GetPathFromLink(offer_link) + offer_id = base.GetResourceIdOrFullNameFromLink(offer_link) + return self.Replace(offer, path, "offers", offer_id, None, None, **kwargs) + + def ReadOffer(self, offer_link, **kwargs): + """Reads an offer. + + :param str offer_link: + The link to the offer. + + :return: + The read Offer. + :rtype: + dict + + """ + path = base.GetPathFromLink(offer_link) + offer_id = base.GetResourceIdOrFullNameFromLink(offer_link) + return self.Read(path, "offers", offer_id, None, {}, **kwargs) + + def ReadOffers(self, options=None, **kwargs): + """Reads all offers. + + :param dict options: + The request options for the request + + :return: + Query Iterable of Offers. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + return self.QueryOffers(None, options, **kwargs) + + def QueryOffers(self, query, options=None, **kwargs): + """Query for all offers. + + :param (str or dict) query: + :param dict options: + The request options for the request + + :return: + Query Iterable of Offers. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + def fetch_fn(options): + return ( + self.__QueryFeed( + "/offers", "offers", "", lambda r: r["Offers"], lambda _, b: b, query, options, **kwargs + ), + self.last_response_headers, + ) + + return AsyncItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) + + async def GetDatabaseAccount(self, url_connection=None, **kwargs): + """Gets database account info. + + :return: + The Database Account. + :rtype: + documents.DatabaseAccount + + """ + if url_connection is None: + url_connection = self.url_connection + + initial_headers = dict(self.default_headers) + headers = base.GetHeaders(self, initial_headers, "get", "", "", "", {}) # path # id # type + + request_params = _request_object.RequestObject("databaseaccount", documents._OperationType.Read, url_connection) + result, self.last_response_headers = await self.__Get("", request_params, headers, **kwargs) + database_account = documents.DatabaseAccount() + database_account.DatabasesLink = "/dbs/" + database_account.MediaLink = "/media/" + if http_constants.HttpHeaders.MaxMediaStorageUsageInMB in self.last_response_headers: + database_account.MaxMediaStorageUsageInMB = self.last_response_headers[ + http_constants.HttpHeaders.MaxMediaStorageUsageInMB + ] + if http_constants.HttpHeaders.CurrentMediaStorageUsageInMB in self.last_response_headers: + database_account.CurrentMediaStorageUsageInMB = self.last_response_headers[ + http_constants.HttpHeaders.CurrentMediaStorageUsageInMB + ] + database_account.ConsistencyPolicy = result.get(constants._Constants.UserConsistencyPolicy) + + # WritableLocations and ReadableLocations fields will be available only for geo-replicated database accounts + if constants._Constants.WritableLocations in result: + database_account._WritableLocations = result[constants._Constants.WritableLocations] + if constants._Constants.ReadableLocations in result: + database_account._ReadableLocations = result[constants._Constants.ReadableLocations] + if constants._Constants.EnableMultipleWritableLocations in result: + database_account._EnableMultipleWritableLocations = result[ + constants._Constants.EnableMultipleWritableLocations + ] + + self._useMultipleWriteLocations = ( + self.connection_policy.UseMultipleWriteLocations and 
database_account._EnableMultipleWritableLocations + ) + return database_account + + async def Create(self, body, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin + """Creates a Azure Cosmos resource and returns it. + + :param dict body: + :param str path: + :param str typ: + :param str id: + :param dict initial_headers: + :param dict options: + The request options for the request. + + :return: + The created Azure Cosmos resource. + :rtype: + dict + + """ + if options is None: + options = {} + + initial_headers = initial_headers or self.default_headers + headers = base.GetHeaders(self, initial_headers, "post", path, id, typ, options) + # Create will use WriteEndpoint since it uses POST operation + + request_params = _request_object.RequestObject(typ, documents._OperationType.Create) + result, self.last_response_headers = await self.__Post(path, request_params, body, headers, **kwargs) + + # update session for write request + self._UpdateSessionIfRequired(headers, result, self.last_response_headers) + return result + + def Upsert(self, body, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin + """Upserts a Azure Cosmos resource and returns it. + + :param dict body: + :param str path: + :param str typ: + :param str id: + :param dict initial_headers: + :param dict options: + The request options for the request. + + :return: + The upserted Azure Cosmos resource. + :rtype: + dict + + """ + if options is None: + options = {} + + initial_headers = initial_headers or self.default_headers + headers = base.GetHeaders(self, initial_headers, "post", path, id, typ, options) + + headers[http_constants.HttpHeaders.IsUpsert] = True + + # Upsert will use WriteEndpoint since it uses POST operation + request_params = _request_object.RequestObject(typ, documents._OperationType.Upsert) + result, self.last_response_headers = self.__Post(path, request_params, body, headers, **kwargs) + # update session for write request + self._UpdateSessionIfRequired(headers, result, self.last_response_headers) + return result + + def Replace(self, resource, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin + """Replaces a Azure Cosmos resource and returns it. + + :param dict resource: + :param str path: + :param str typ: + :param str id: + :param dict initial_headers: + :param dict options: + The request options for the request. + + :return: + The new Azure Cosmos resource. + :rtype: + dict + + """ + if options is None: + options = {} + + initial_headers = initial_headers or self.default_headers + headers = base.GetHeaders(self, initial_headers, "put", path, id, typ, options) + # Replace will use WriteEndpoint since it uses PUT operation + request_params = _request_object.RequestObject(typ, documents._OperationType.Replace) + result, self.last_response_headers = self.__Put(path, request_params, resource, headers, **kwargs) + + # update session for request mutates data on server side + self._UpdateSessionIfRequired(headers, result, self.last_response_headers) + return result + + def Read(self, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin + """Reads a Azure Cosmos resource and returns it. + + :param str path: + :param str typ: + :param str id: + :param dict initial_headers: + :param dict options: + The request options for the request. + + :return: + The upserted Azure Cosmos resource. 
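[Illustrative sketch, not part of this patch] GetDatabaseAccount above is already awaitable in this patch; it issues a GET against the account root and copies consistency, quota, and location information onto a documents.DatabaseAccount instance. The sketch below reads the replication topology back out of that object, using only the private fields populated above; everything else is assumed.

    async def show_topology(conn):
        account = await conn.GetDatabaseAccount()
        # These fields are filled in only for geo-replicated accounts.
        for location in getattr(account, "_WritableLocations", []) or []:
            print("write region:", location)
        for location in getattr(account, "_ReadableLocations", []) or []:
            print("read region:", location)
        print("multi-write:", getattr(account, "_EnableMultipleWritableLocations", False))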
+ :rtype: + dict + + """ + if options is None: + options = {} + + initial_headers = initial_headers or self.default_headers + headers = base.GetHeaders(self, initial_headers, "get", path, id, typ, options) + # Read will use ReadEndpoint since it uses GET operation + request_params = _request_object.RequestObject(typ, documents._OperationType.Read) + result, self.last_response_headers = self.__Get(path, request_params, headers, **kwargs) + return result + + async def ReadAsync(self, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin + """Reads a Azure Cosmos resource and returns it. + + :param str path: + :param str typ: + :param str id: + :param dict initial_headers: + :param dict options: + The request options for the request. + + :return: + The upserted Azure Cosmos resource. + :rtype: + dict + + """ + if options is None: + options = {} + + initial_headers = initial_headers or self.default_headers + headers = base.GetHeaders(self, initial_headers, "get", path, id, typ, options) + # Read will use ReadEndpoint since it uses GET operation + request_params = _request_object.RequestObject(typ, documents._OperationType.Read) + result, self.last_response_headers = await self.__Get(path, request_params, headers, **kwargs) + return result + + def DeleteResource(self, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin + """Deletes a Azure Cosmos resource and returns it. + + :param str path: + :param str typ: + :param str id: + :param dict initial_headers: + :param dict options: + The request options for the request. + + :return: + The deleted Azure Cosmos resource. + :rtype: + dict + + """ + if options is None: + options = {} + + initial_headers = initial_headers or self.default_headers + headers = base.GetHeaders(self, initial_headers, "delete", path, id, typ, options) + # Delete will use WriteEndpoint since it uses DELETE operation + request_params = _request_object.RequestObject(typ, documents._OperationType.Delete) + result, self.last_response_headers = self.__Delete(path, request_params, headers, **kwargs) + + # update session for request mutates data on server side + self._UpdateSessionIfRequired(headers, result, self.last_response_headers) + + return result + + async def __Get(self, path, request_params, req_headers, **kwargs): + """Azure Cosmos 'GET' http request. + + :params str url: + :params str path: + :params dict req_headers: + + :return: + Tuple of (result, headers). + :rtype: + tuple of (dict, dict) + + """ + request = self.pipeline_client.get(url=path, headers=req_headers) + return await asynchronous_request.AsynchronousRequest( + client=self, + request_params=request_params, + global_endpoint_manager=self._global_endpoint_manager, + connection_policy=self.connection_policy, + pipeline_client=self.pipeline_client, + request=request, + request_data=None, + **kwargs + ) + + def __GetSync(self, path, request_params, req_headers, **kwargs): + """Azure Cosmos 'GET' http request. + + :params str url: + :params str path: + :params dict req_headers: + + :return: + Tuple of (result, headers). 
+ :rtype: + tuple of (dict, dict) + + """ + request = self.pipeline_client.get(url=path, headers=req_headers) + return synchronized_request.SynchronizedRequest( + client=self, + request_params=request_params, + global_endpoint_manager=self._global_endpoint_manager, + connection_policy=self.connection_policy, + pipeline_client=self.pipeline_client, + request=request, + request_data=None, + **kwargs + ) + + def __Post(self, path, request_params, body, req_headers, **kwargs): + """Azure Cosmos 'POST' http request. + + :params str url: + :params str path: + :params (str, unicode, dict) body: + :params dict req_headers: + + :return: + Tuple of (result, headers). + :rtype: + tuple of (dict, dict) + + """ + request = self.pipeline_client.post(url=path, headers=req_headers) + return synchronized_request.SynchronizedRequest( + client=self, + request_params=request_params, + global_endpoint_manager=self._global_endpoint_manager, + connection_policy=self.connection_policy, + pipeline_client=self.pipeline_client, + request=request, + request_data=body, + **kwargs + ) + + def __Put(self, path, request_params, body, req_headers, **kwargs): + """Azure Cosmos 'PUT' http request. + + :params str url: + :params str path: + :params (str, unicode, dict) body: + :params dict req_headers: + + :return: + Tuple of (result, headers). + :rtype: + tuple of (dict, dict) + + """ + request = self.pipeline_client.put(url=path, headers=req_headers) + return synchronized_request.SynchronizedRequest( + client=self, + request_params=request_params, + global_endpoint_manager=self._global_endpoint_manager, + connection_policy=self.connection_policy, + pipeline_client=self.pipeline_client, + request=request, + request_data=body, + **kwargs + ) + + def __Delete(self, path, request_params, req_headers, **kwargs): + """Azure Cosmos 'DELETE' http request. + + :params str url: + :params str path: + :params dict req_headers: + + :return: + Tuple of (result, headers). + :rtype: + tuple of (dict, dict) + + """ + request = self.pipeline_client.delete(url=path, headers=req_headers) + return synchronized_request.SynchronizedRequest( + client=self, + request_params=request_params, + global_endpoint_manager=self._global_endpoint_manager, + connection_policy=self.connection_policy, + pipeline_client=self.pipeline_client, + request=request, + request_data=None, + **kwargs + ) + + def QueryFeed(self, path, collection_id, query, options, partition_key_range_id=None, **kwargs): + """Query Feed for Document Collection resource. + + :param str path: + Path to the document collection. + :param str collection_id: + Id of the document collection. + :param (str or dict) query: + :param dict options: + The request options for the request. + :param str partition_key_range_id: + Partition key range id. + :rtype: + tuple + + """ + return ( + self.__QueryFeed( + path, + "docs", + collection_id, + lambda r: r["Documents"], + lambda _, b: b, + query, + options, + partition_key_range_id, + **kwargs + ), + self.last_response_headers, + ) + + def __QueryFeed( + self, + path, + typ, + id_, + result_fn, + create_fn, + query, + options=None, + partition_key_range_id=None, + response_hook=None, + is_query_plan=False, + **kwargs + ): + """Query for more than one Azure Cosmos resources. + + :param str path: + :param str typ: + :param str id_: + :param function result_fn: + :param function create_fn: + :param (str or dict) query: + :param dict options: + The request options for the request. + :param str partition_key_range_id: + Specifies partition key range id. 
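[Illustrative sketch, not part of this patch] The verb wrappers above (__Get, __Post, __Put, __Delete) each build a pipeline request for one HTTP method and hand it, together with the endpoint manager and connection policy, to the request helper; the generic Create/Upsert/Replace/Read methods earlier pick whichever wrapper matches their operation. Below is a rough sketch of a document create flowing through that plumbing. The link, document, and import path are assumptions, and the exact strings returned by the path helpers are deliberately not reproduced.

    from azure.cosmos import _base as base   # shared helpers used throughout this module

    async def create_one_document(conn):
        collection_link = "dbs/mydb/colls/mycoll"                # hypothetical link
        path = base.GetPathFromLink(collection_link, "docs")     # docs feed path under the collection
        collection_id = base.GetResourceIdOrFullNameFromLink(collection_link)
        document = {"id": "item1", "value": 42}
        # Create() builds POST headers, dispatches through the POST wrapper,
        # and updates the session token from the response headers.
        return await conn.Create(document, path, "docs", collection_id, None, {})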
+ :param function response_hook: + :param bool is_query_plan: + Specififes if the call is to fetch query plan + + :rtype: + list + + :raises SystemError: If the query compatibility mode is undefined. + + """ + if options is None: + options = {} + + if query: + __GetBodiesFromQueryResult = result_fn + else: + + def __GetBodiesFromQueryResult(result): + if result is not None: + return [create_fn(self, body) for body in result_fn(result)] + # If there is no change feed, the result data is empty and result is None. + # This case should be interpreted as an empty array. + return [] + + initial_headers = self.default_headers.copy() + # Copy to make sure that default_headers won't be changed. + if query is None: + # Query operations will use ReadEndpoint even though it uses GET(for feed requests) + request_params = _request_object.RequestObject(typ, + documents._OperationType.QueryPlan if is_query_plan else documents._OperationType.ReadFeed) + headers = base.GetHeaders(self, initial_headers, "get", path, id_, typ, options, partition_key_range_id) + result, self.last_response_headers = self.__Get(path, request_params, headers, **kwargs) + if response_hook: + response_hook(self.last_response_headers, result) + return __GetBodiesFromQueryResult(result) + + query = self.__CheckAndUnifyQueryFormat(query) + + initial_headers[http_constants.HttpHeaders.IsQuery] = "true" + if not is_query_plan: + initial_headers[http_constants.HttpHeaders.IsQuery] = "true" + + if ( + self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.Default + or self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.Query + ): + initial_headers[http_constants.HttpHeaders.ContentType] = runtime_constants.MediaTypes.QueryJson + elif self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.SqlQuery: + initial_headers[http_constants.HttpHeaders.ContentType] = runtime_constants.MediaTypes.SQL + else: + raise SystemError("Unexpected query compatibility mode.") + + # Query operations will use ReadEndpoint even though it uses POST(for regular query operations) + request_params = _request_object.RequestObject(typ, documents._OperationType.SqlQuery) + req_headers = base.GetHeaders(self, initial_headers, "post", path, id_, typ, options, partition_key_range_id) + result, self.last_response_headers = self.__Post(path, request_params, query, req_headers, **kwargs) + + if response_hook: + response_hook(self.last_response_headers, result) + + return __GetBodiesFromQueryResult(result) + + def _GetQueryPlanThroughGateway(self, query, resource_link, **kwargs): + supported_query_features = (documents._QueryFeature.Aggregate + "," + + documents._QueryFeature.CompositeAggregate + "," + + documents._QueryFeature.Distinct + "," + + documents._QueryFeature.MultipleOrderBy + "," + + documents._QueryFeature.OffsetAndLimit + "," + + documents._QueryFeature.OrderBy + "," + + documents._QueryFeature.Top) + + options = { + "contentType": runtime_constants.MediaTypes.Json, + "isQueryPlanRequest": True, + "supportedQueryFeatures": supported_query_features, + "queryVersion": http_constants.Versions.QueryVersion + } + + resource_link = base.TrimBeginningAndEndingSlashes(resource_link) + path = base.GetPathFromLink(resource_link, "docs") + resource_id = base.GetResourceIdOrFullNameFromLink(resource_link) + + return self.__QueryFeed(path, + "docs", + resource_id, + lambda r: r, + None, + query, + options, + is_query_plan=True, + **kwargs) + + def __CheckAndUnifyQueryFormat(self, query_body): + 
"""Checks and unifies the format of the query body. + + :raises TypeError: If query_body is not of expected type (depending on the query compatibility mode). + :raises ValueError: If query_body is a dict but doesn\'t have valid query text. + :raises SystemError: If the query compatibility mode is undefined. + + :param (str or dict) query_body: + + :return: + The formatted query body. + :rtype: + dict or string + """ + if ( + self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.Default + or self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.Query + ): + if not isinstance(query_body, dict) and not isinstance(query_body, six.string_types): + raise TypeError("query body must be a dict or string.") + if isinstance(query_body, dict) and not query_body.get("query"): + raise ValueError('query body must have valid query text with key "query".') + if isinstance(query_body, six.string_types): + return {"query": query_body} + elif ( + self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.SqlQuery + and not isinstance(query_body, six.string_types) + ): + raise TypeError("query body must be a string.") + else: + raise SystemError("Unexpected query compatibility mode.") + + return query_body + + @staticmethod + def __ValidateResource(resource): + id_ = resource.get("id") + if id_: + try: + if id_.find("/") != -1 or id_.find("\\") != -1 or id_.find("?") != -1 or id_.find("#") != -1: + raise ValueError("Id contains illegal chars.") + + if id_[-1] == " ": + raise ValueError("Id ends with a space.") + except AttributeError: + raise_with_traceback(TypeError, message="Id type must be a string.") + + # Adds the partition key to options + def _AddPartitionKey(self, collection_link, document, options): + collection_link = base.TrimBeginningAndEndingSlashes(collection_link) + + # TODO: Refresh the cache if partition is extracted automatically and we get a 400.1001 + + # If the document collection link is present in the cache, then use the cached partitionkey definition + if collection_link in self.partition_key_definition_cache: + partitionKeyDefinition = self.partition_key_definition_cache.get(collection_link) + # Else read the collection from backend and add it to the cache + else: + collection = self.ReadContainer(collection_link) + partitionKeyDefinition = collection.get("partitionKey") + self.partition_key_definition_cache[collection_link] = partitionKeyDefinition + + # If the collection doesn't have a partition key definition, skip it as it's a legacy collection + if partitionKeyDefinition: + # If the user has passed in the partitionKey in options use that elase extract it from the document + if "partitionKey" not in options: + partitionKeyValue = self._ExtractPartitionKey(partitionKeyDefinition, document) + options["partitionKey"] = partitionKeyValue + + return options + + # Extracts the partition key from the document using the partitionKey definition + def _ExtractPartitionKey(self, partitionKeyDefinition, document): + + # Parses the paths into a list of token each representing a property + partition_key_parts = base.ParsePaths(partitionKeyDefinition.get("paths")) + # Check if the partitionKey is system generated or not + is_system_key = partitionKeyDefinition["systemKey"] if "systemKey" in partitionKeyDefinition else False + + # Navigates the document to retrieve the partitionKey specified in the paths + return self._retrieve_partition_key(partition_key_parts, document, is_system_key) + + # Navigates the document to 
retrieve the partitionKey specified in the partition key parts + def _retrieve_partition_key(self, partition_key_parts, document, is_system_key): + expected_matchCount = len(partition_key_parts) + matchCount = 0 + partitionKey = document + + for part in partition_key_parts: + # At any point if we don't find the value of a sub-property in the document, we return as Undefined + if part not in partitionKey: + return self._return_undefined_or_empty_partition_key(is_system_key) + + partitionKey = partitionKey.get(part) + matchCount += 1 + # Once we reach the "leaf" value(not a dict), we break from loop + if not isinstance(partitionKey, dict): + break + + # Match the count of hops we did to get the partitionKey with the length of + # partition key parts and validate that it's not a dict at that level + if (matchCount != expected_matchCount) or isinstance(partitionKey, dict): + return self._return_undefined_or_empty_partition_key(is_system_key) + + return partitionKey + + def _UpdateSessionIfRequired(self, request_headers, response_result, response_headers): + """ + Updates session if necessary. + + :param dict response_result: + :param dict response_headers: + :param dict response_headers + + :return: + None, but updates the client session if necessary. + + """ + + # if this request was made with consistency level as session, then update the session + if response_result is None or response_headers is None: + return + + is_session_consistency = False + if http_constants.HttpHeaders.ConsistencyLevel in request_headers: + if documents.ConsistencyLevel.Session == request_headers[http_constants.HttpHeaders.ConsistencyLevel]: + is_session_consistency = True + + if is_session_consistency: + # update session + self.session.update_session(response_result, response_headers) + + @staticmethod + def _return_undefined_or_empty_partition_key(is_system_key): + if is_system_key: + return _Empty + return _Undefined diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py new file mode 100644 index 000000000000..2248619fb335 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py @@ -0,0 +1,174 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal class for global endpoint manager implementation in the Azure Cosmos +database service. 
+""" + +import threading + +from six.moves.urllib.parse import urlparse + +from .. import _constants as constants +from .. import exceptions +from .._location_cache import LocationCache + +# pylint: disable=protected-access + + +class _GlobalEndpointManager(object): + """ + This internal class implements the logic for endpoint management for + geo-replicated database accounts. + """ + + def __init__(self, client): + self.Client = client + self.EnableEndpointDiscovery = client.connection_policy.EnableEndpointDiscovery + self.PreferredLocations = client.connection_policy.PreferredLocations + self.DefaultEndpoint = client.url_connection + self.refresh_time_interval_in_ms = self.get_refresh_time_interval_in_ms_stub() + self.location_cache = LocationCache( + self.PreferredLocations, + self.DefaultEndpoint, + self.EnableEndpointDiscovery, + client.connection_policy.UseMultipleWriteLocations, + self.refresh_time_interval_in_ms, + ) + self.refresh_needed = False + self.refresh_lock = threading.RLock() + self.last_refresh_time = 0 + + def get_refresh_time_interval_in_ms_stub(self): # pylint: disable=no-self-use + return constants._Constants.DefaultUnavailableLocationExpirationTime + + def get_write_endpoint(self): + return self.location_cache.get_write_endpoint() + + def get_read_endpoint(self): + return self.location_cache.get_read_endpoint() + + def resolve_service_endpoint(self, request): + return self.location_cache.resolve_service_endpoint(request) + + def mark_endpoint_unavailable_for_read(self, endpoint): + self.location_cache.mark_endpoint_unavailable_for_read(endpoint) + + def mark_endpoint_unavailable_for_write(self, endpoint): + self.location_cache.mark_endpoint_unavailable_for_write(endpoint) + + def get_ordered_write_endpoints(self): + return self.location_cache.get_ordered_write_endpoints() + + def get_ordered_read_endpoints(self): + return self.location_cache.get_ordered_read_endpoints() + + def can_use_multiple_write_locations(self, request): + return self.location_cache.can_use_multiple_write_locations_for_request(request) + + def force_refresh(self, database_account): + self.refresh_needed = True + self.refresh_endpoint_list(database_account) + + def refresh_endpoint_list(self, database_account, **kwargs): + with self.refresh_lock: + # if refresh is not needed or refresh is already taking place, return + if not self.refresh_needed: + return + try: + self._refresh_endpoint_list_private(database_account, **kwargs) + except Exception as e: + raise e + + def _refresh_endpoint_list_private(self, database_account=None, **kwargs): + if database_account: + self.location_cache.perform_on_database_account_read(database_account) + self.refresh_needed = False + + if ( + self.location_cache.should_refresh_endpoints() + and self.location_cache.current_time_millis() - self.last_refresh_time > self.refresh_time_interval_in_ms + ): + if not database_account: + database_account = self._GetDatabaseAccount(**kwargs) + self.location_cache.perform_on_database_account_read(database_account) + self.last_refresh_time = self.location_cache.current_time_millis() + self.refresh_needed = False + + def _GetDatabaseAccount(self, **kwargs): + """Gets the database account. + + First tries by using the default endpoint, and if that doesn't work, + use the endpoints for the preferred locations in the order they are + specified, to get the database account. 
+ """ + try: + database_account = self._GetDatabaseAccountStub(self.DefaultEndpoint, **kwargs) + return database_account + # If for any reason(non-globaldb related), we are not able to get the database + # account from the above call to GetDatabaseAccount, we would try to get this + # information from any of the preferred locations that the user might have + # specified (by creating a locational endpoint) and keeping eating the exception + # until we get the database account and return None at the end, if we are not able + # to get that info from any endpoints + except exceptions.CosmosHttpResponseError: + for location_name in self.PreferredLocations: + locational_endpoint = _GlobalEndpointManager.GetLocationalEndpoint(self.DefaultEndpoint, location_name) + try: + database_account = self._GetDatabaseAccountStub(locational_endpoint, **kwargs) + return database_account + except exceptions.CosmosHttpResponseError: + pass + + return None + + def _GetDatabaseAccountStub(self, endpoint, **kwargs): + """Stub for getting database account from the client. + + This can be used for mocking purposes as well. + """ + return self.Client.GetDatabaseAccount(endpoint, **kwargs) + + @staticmethod + def GetLocationalEndpoint(default_endpoint, location_name): + # For default_endpoint like 'https://contoso.documents.azure.com:443/' parse it to + # generate URL format. This default_endpoint should be global endpoint(and cannot + # be a locational endpoint) and we agreed to document that + endpoint_url = urlparse(default_endpoint) + + # hostname attribute in endpoint_url will return 'contoso.documents.azure.com' + if endpoint_url.hostname is not None: + hostname_parts = str(endpoint_url.hostname).lower().split(".") + if hostname_parts is not None: + # global_database_account_name will return 'contoso' + global_database_account_name = hostname_parts[0] + + # Prepare the locational_database_account_name as contoso-EastUS for location_name 'East US' + locational_database_account_name = global_database_account_name + "-" + location_name.replace(" ", "") + + # Replace 'contoso' with 'contoso-EastUS' and return locational_endpoint + # as https://contoso-EastUS.documents.azure.com:443/ + locational_endpoint = default_endpoint.lower().replace( + global_database_account_name, locational_database_account_name, 1 + ) + return locational_endpoint + + return None diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container_async.py new file mode 100644 index 000000000000..6120d533c918 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container_async.py @@ -0,0 +1,802 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
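[Illustrative sketch, not part of this patch] GetLocationalEndpoint above derives a per-region endpoint by splicing the region name into the global account name. A quick worked example of the transformation it performs, with hypothetical values:

    # Module path as added in this patch.
    from azure.cosmos.aio._global_endpoint_manager_async import _GlobalEndpointManager

    # "contoso" + "East US" -> "contoso-EastUS", spliced back into the lowercased endpoint.
    endpoint = _GlobalEndpointManager.GetLocationalEndpoint(
        "https://contoso.documents.azure.com:443/", "East US"
    )
    print(endpoint)   # https://contoso-EastUS.documents.azure.com:443/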
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Create, read, update and delete items in the Azure Cosmos DB SQL API service. +""" + +from typing import Any, Dict, List, Optional, Union, Iterable, cast # pylint: disable=unused-import + +import six +import asyncio +import time +from azure.core.tracing.decorator import distributed_trace # type: ignore + +from ._cosmos_client_connection_async import CosmosClientConnection +from .._base import build_options +from ..exceptions import CosmosResourceNotFoundError +from ..http_constants import StatusCodes +from ..offer import Offer +from ..scripts import ScriptsProxy +from ..partition_key import NonePartitionKeyValue + +__all__ = ("ContainerProxy",) + +# pylint: disable=protected-access +# pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs + + +class ContainerProxy(object): + """An interface to interact with a specific DB Container. + + This class should not be instantiated directly. Instead, use the + :func:`DatabaseProxy.get_container_client` method to get an existing + container, or the :func:`Database.create_container` method to create a + new container. + + A container in an Azure Cosmos DB SQL API database is a collection of + documents, each of which is represented as an Item. + + :ivar str id: ID (name) of the container + :ivar str session_token: The session token for the container. + """ + + def __init__(self, client_connection, database_link, id, properties=None): # pylint: disable=redefined-builtin + # type: (CosmosClientConnection, str, str, Dict[str, Any]) -> None + self.client_connection = client_connection + self.id = id + self._properties = properties + self.container_link = u"{}/colls/{}".format(database_link, self.id) + self._is_system_key = None + self._scripts = None # type: Optional[ScriptsProxy] + + def __repr__(self): + # type () -> str + return "".format(self.container_link)[:1024] + + def _get_properties(self): + # type: () -> Dict[str, Any] + if self._properties is None: + self._properties = self.read() + return self._properties + + @property + def is_system_key(self): + # type: () -> bool + if self._is_system_key is None: + properties = self._get_properties() + self._is_system_key = ( + properties["partitionKey"]["systemKey"] if "systemKey" in properties["partitionKey"] else False + ) + return cast('bool', self._is_system_key) + + @property + def scripts(self): + # type: () -> ScriptsProxy + if self._scripts is None: + self._scripts = ScriptsProxy(self.client_connection, self.container_link, self.is_system_key) + return cast('ScriptsProxy', self._scripts) + + def _get_document_link(self, item_or_link): + # type: (Union[Dict[str, Any], str]) -> str + if isinstance(item_or_link, six.string_types): + return u"{}/docs/{}".format(self.container_link, item_or_link) + return item_or_link["_self"] + + def _get_conflict_link(self, conflict_or_link): + # type: (Union[Dict[str, Any], str]) -> str + if isinstance(conflict_or_link, six.string_types): + return u"{}/conflicts/{}".format(self.container_link, conflict_or_link) + return conflict_or_link["_self"] + + def _set_partition_key(self, partition_key): + if partition_key == NonePartitionKeyValue: + return CosmosClientConnection._return_undefined_or_empty_partition_key(self.is_system_key) + return 
partition_key + + @distributed_trace + def read( + self, + populate_query_metrics=None, # type: Optional[bool] + populate_partition_key_range_statistics=None, # type: Optional[bool] + populate_quota_info=None, # type: Optional[bool] + **kwargs # type: Any + ): + # type: (...) -> Dict[str, Any] + """Read the container properties. + + :param populate_query_metrics: Enable returning query metrics in response headers. + :param populate_partition_key_range_statistics: Enable returning partition key + range statistics in response headers. + :param populate_quota_info: Enable returning collection storage quota information in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword Callable response_hook: A callable invoked with the response metadata. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: Raised if the container couldn't be retrieved. + This includes if the container does not exist. + :returns: Dict representing the retrieved container. + :rtype: dict[str, Any] + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if populate_query_metrics is not None: + request_options["populateQueryMetrics"] = populate_query_metrics + if populate_partition_key_range_statistics is not None: + request_options["populatePartitionKeyRangeStatistics"] = populate_partition_key_range_statistics + if populate_quota_info is not None: + request_options["populateQuotaInfo"] = populate_quota_info + + collection_link = self.container_link + self._properties = self.client_connection.ReadContainer( + collection_link, options=request_options, **kwargs + ) + + if response_hook: + response_hook(self.client_connection.last_response_headers, self._properties) + + return cast('Dict[str, Any]', self._properties) + + @distributed_trace + async def read_item( + self, + item, # type: Union[str, Dict[str, Any]] + partition_key, # type: Any + populate_query_metrics=None, # type: Optional[bool] + post_trigger_include=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Dict[str, str] + """Get the item identified by `item`. + + :param item: The ID (name) or dict representing item to retrieve. + :param partition_key: Partition key for the item to retrieve. + :param populate_query_metrics: Enable returning query metrics in response headers. + :param post_trigger_include: trigger id to be used as post operation trigger. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: Dict representing the item to be retrieved. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The given item couldn't be retrieved. + :rtype: dict[str, Any] + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/examples.py + :start-after: [START update_item] + :end-before: [END update_item] + :language: python + :dedent: 0 + :caption: Get an item from the database and update one of its properties: + :name: update_item + """ + doc_link = self._get_document_link(item) + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + + if partition_key is not None: + request_options["partitionKey"] = self._set_partition_key(partition_key) + if populate_query_metrics is not None: + request_options["populateQueryMetrics"] = populate_query_metrics + if post_trigger_include is not None: + request_options["postTriggerInclude"] = post_trigger_include + + result = await self.client_connection.ReadItem(document_link=doc_link, options=request_options, **kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace + def read_all_items( + self, + max_item_count=None, # type: Optional[int] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any + ): + # type: (...) -> Iterable[Dict[str, Any]] + """List all the items in the container. + + :param max_item_count: Max number of items to be returned in the enumeration operation. + :param populate_query_metrics: Enable returning query metrics in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: An Iterable of items (dicts). + :rtype: Iterable[dict[str, Any]] + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + if populate_query_metrics is not None: + feed_options["populateQueryMetrics"] = populate_query_metrics + + if hasattr(response_hook, "clear"): + response_hook.clear() + + items = self.client_connection.ReadItems( + collection_link=self.container_link, feed_options=feed_options, response_hook=response_hook, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, items) + return items + + @distributed_trace + def query_items_change_feed( + self, + partition_key_range_id=None, # type: Optional[str] + is_start_from_beginning=False, # type: bool + continuation=None, # type: Optional[str] + max_item_count=None, # type: Optional[int] + **kwargs # type: Any + ): + # type: (...) -> Iterable[Dict[str, Any]] + """Get a sorted list of items that were changed, in the order in which they were modified. + + :param partition_key_range_id: ChangeFeed requests can be executed against specific partition key ranges. + This is used to process the change feed in parallel across multiple consumers. + :param partition_key: partition key at which ChangeFeed requests are targetted. + :param is_start_from_beginning: Get whether change feed should start from + beginning (true) or from current (false). By default it's start from current (false). + :param continuation: e_tag value to be used as continuation for reading change feed. + :param max_item_count: Max number of items to be returned in the enumeration operation. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: An Iterable of items (dicts). 
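[Illustrative sketch, not part of this patch] read_item is already a coroutine in this patch (it awaits the connection-level ReadItem), while read_all_items still hands back the paged iterable directly. A hedged usage sketch against an existing ContainerProxy; `container`, the item id, and the partition key value are assumptions.

    async def show_item(container):
        # Point-read one item; partition_key must match the item's partition key value.
        item = await container.read_item(item="item1", partition_key="partition1")
        print(item["id"])

        # Enumerate the container; may need to become `async for` as the
        # async migration of the connection layer completes.
        for doc in container.read_all_items(max_item_count=10):
            print(doc["id"])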
+ :rtype: Iterable[dict[str, Any]] + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if partition_key_range_id is not None: + feed_options["partitionKeyRangeId"] = partition_key_range_id + partition_key = kwargs.pop("partitionKey", None) + if partition_key is not None: + feed_options["partitionKey"] = partition_key + if is_start_from_beginning is not None: + feed_options["isStartFromBeginning"] = is_start_from_beginning + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + if continuation is not None: + feed_options["continuation"] = continuation + + if hasattr(response_hook, "clear"): + response_hook.clear() + + result = self.client_connection.QueryItemsChangeFeed( + self.container_link, options=feed_options, response_hook=response_hook, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace + def query_items( + self, + query, # type: str + parameters=None, # type: Optional[List[Dict[str, object]]] + partition_key=None, # type: Optional[Any] + enable_cross_partition_query=None, # type: Optional[bool] + max_item_count=None, # type: Optional[int] + enable_scan_in_query=None, # type: Optional[bool] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any + ): + # type: (...) -> Iterable[Dict[str, Any]] + """Return all results matching the given `query`. + + You can use any value for the container name in the FROM clause, but + often the container name is used. In the examples below, the container + name is "products," and is aliased as "p" for easier referencing in + the WHERE clause. + + :param query: The Azure Cosmos DB SQL query to execute. + :param parameters: Optional array of parameters to the query. + Each parameter is a dict() with 'name' and 'value' keys. + Ignored if no query is provided. + :param partition_key: Specifies the partition key value for the item. + :param enable_cross_partition_query: Allows sending of more than one request to + execute the query in the Azure Cosmos DB service. + More than one request is necessary if the query is not scoped to single partition key value. + :param max_item_count: Max number of items to be returned in the enumeration operation. + :param enable_scan_in_query: Allow scan on the queries which couldn't be served as + indexing was opted out on the requested paths. + :param populate_query_metrics: Enable returning query metrics in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: An Iterable of items (dicts). + :rtype: Iterable[dict[str, Any]] + + .. admonition:: Example: + + .. literalinclude:: ../samples/examples.py + :start-after: [START query_items] + :end-before: [END query_items] + :language: python + :dedent: 0 + :caption: Get all products that have not been discontinued: + :name: query_items + + .. 
literalinclude:: ../samples/examples.py + :start-after: [START query_items_param] + :end-before: [END query_items_param] + :language: python + :dedent: 0 + :caption: Parameterized query to get all products that have been discontinued: + :name: query_items_param + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if enable_cross_partition_query is not None: + feed_options["enableCrossPartitionQuery"] = enable_cross_partition_query + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + if populate_query_metrics is not None: + feed_options["populateQueryMetrics"] = populate_query_metrics + if partition_key is not None: + feed_options["partitionKey"] = self._set_partition_key(partition_key) + if enable_scan_in_query is not None: + feed_options["enableScanInQuery"] = enable_scan_in_query + + if hasattr(response_hook, "clear"): + response_hook.clear() + + items = self.client_connection.QueryItems( + database_or_container_link=self.container_link, + query=query if parameters is None else dict(query=query, parameters=parameters), + options=feed_options, + partition_key=partition_key, + response_hook=response_hook, + **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, items) + return items + + @distributed_trace + def replace_item( + self, + item, # type: Union[str, Dict[str, Any]] + body, # type: Dict[str, Any] + populate_query_metrics=None, # type: Optional[bool] + pre_trigger_include=None, # type: Optional[str] + post_trigger_include=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Dict[str, str] + """Replaces the specified item if it exists in the container. + + If the item does not already exist in the container, an exception is raised. + + :param item: The ID (name) or dict representing item to be replaced. + :param body: A dict-like object representing the item to replace. + :param populate_query_metrics: Enable returning query metrics in response headers. + :param pre_trigger_include: trigger id to be used as pre operation trigger. + :param post_trigger_include: trigger id to be used as post operation trigger. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: A dict representing the item after replace went through. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The replace failed or the item with + given id does not exist. 
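[Illustrative sketch, not part of this patch] query_items above accepts the SQL text plus an optional parameters array, with either partition_key or enable_cross_partition_query controlling fan-out. A parameterized sketch follows; the container, query values, and iteration style are assumptions, and the current implementation returns the paged iterable without awaiting.

    # Hypothetical parameterized query scoped to one partition.
    results = container.query_items(
        query="SELECT * FROM products p WHERE p.category = @category AND p.discontinued = false",
        parameters=[{"name": "@category", "value": "widgets"}],
        partition_key="widgets",
        max_item_count=25,
    )
    for product in results:     # may become `async for` as the async migration completes
        print(product["id"])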
+ :rtype: dict[str, Any] + """ + item_link = self._get_document_link(item) + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + request_options["disableIdGeneration"] = True + if populate_query_metrics is not None: + request_options["populateQueryMetrics"] = populate_query_metrics + if pre_trigger_include is not None: + request_options["preTriggerInclude"] = pre_trigger_include + if post_trigger_include is not None: + request_options["postTriggerInclude"] = post_trigger_include + + result = self.client_connection.ReplaceItem( + document_link=item_link, new_document=body, options=request_options, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace + def upsert_item( + self, + body, # type: Dict[str, Any] + populate_query_metrics=None, # type: Optional[bool] + pre_trigger_include=None, # type: Optional[str] + post_trigger_include=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Dict[str, str] + """Insert or update the specified item. + + If the item already exists in the container, it is replaced. If the item + does not already exist, it is inserted. + + :param body: A dict-like object representing the item to update or insert. + :param populate_query_metrics: Enable returning query metrics in response headers. + :param pre_trigger_include: trigger id to be used as pre operation trigger. + :param post_trigger_include: trigger id to be used as post operation trigger. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: A dict representing the upserted item. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The given item could not be upserted. + :rtype: dict[str, Any] + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + request_options["disableIdGeneration"] = True + if populate_query_metrics is not None: + request_options["populateQueryMetrics"] = populate_query_metrics + if pre_trigger_include is not None: + request_options["preTriggerInclude"] = pre_trigger_include + if post_trigger_include is not None: + request_options["postTriggerInclude"] = post_trigger_include + + result = self.client_connection.UpsertItem( + database_or_container_link=self.container_link, + document=body, + options=request_options, + **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace + def create_item( + self, + body, # type: Dict[str, Any] + populate_query_metrics=None, # type: Optional[bool] + pre_trigger_include=None, # type: Optional[str] + post_trigger_include=None, # type: Optional[str] + indexing_directive=None, # type: Optional[Any] + **kwargs # type: Any + ): + # type: (...) -> Dict[str, str] + """Create an item in the container. + + To update or replace an existing item, use the + :func:`ContainerProxy.upsert_item` method. + + :param body: A dict-like object representing the item to create. 
+ :param populate_query_metrics: Enable returning query metrics in response headers. + :param pre_trigger_include: trigger id to be used as pre operation trigger. + :param post_trigger_include: trigger id to be used as post operation trigger. + :param indexing_directive: Indicate whether the document should be omitted from indexing. + :keyword bool enable_automatic_id_generation: Enable automatic id generation if no id present. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: A dict representing the new item. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: Item with the given ID already exists. + :rtype: dict[str, Any] + """ + start = time.time() + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + + request_options["disableAutomaticIdGeneration"] = not kwargs.pop('enable_automatic_id_generation', False) + if populate_query_metrics: + request_options["populateQueryMetrics"] = populate_query_metrics + if pre_trigger_include is not None: + request_options["preTriggerInclude"] = pre_trigger_include + if post_trigger_include is not None: + request_options["postTriggerInclude"] = post_trigger_include + if indexing_directive is not None: + request_options["indexingDirective"] = indexing_directive + + result = self.client_connection.CreateItem( + database_or_container_link=self.container_link, document=body, options=request_options, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + print(f"Create item took {(time.time() - start) * 1000} ms") + print("ASYNC CONTAINER USED") + return result + + @distributed_trace + async def create_item_aio( + self, + body, # type: Dict[str, Any] + populate_query_metrics=None, # type: Optional[bool] + pre_trigger_include=None, # type: Optional[str] + post_trigger_include=None, # type: Optional[str] + indexing_directive=None, # type: Optional[Any] + **kwargs # type: Any + ): + # type: (...) -> Dict[str, str] + """Create an item in the container. + + To update or replace an existing item, use the + :func:`ContainerProxy.upsert_item` method. + + :param body: A dict-like object representing the item to create. + :param populate_query_metrics: Enable returning query metrics in response headers. + :param pre_trigger_include: trigger id to be used as pre operation trigger. + :param post_trigger_include: trigger id to be used as post operation trigger. + :param indexing_directive: Indicate whether the document should be omitted from indexing. + :keyword bool enable_automatic_id_generation: Enable automatic id generation if no id present. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. 
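The synchronous `create_item` above still times the call and prints debug output in this snapshot; a hedged usage sketch, where `enable_automatic_id_generation` maps onto the `disableAutomaticIdGeneration` request option:

    # Assumes `container` is an existing ContainerProxy; with automatic id
    # generation enabled the body may omit "id".
    created = container.create_item(
        body={"productModel": "Model-8", "discontinued": False},
        enable_automatic_id_generation=True,
    )
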
+ :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: A dict representing the new item. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: Item with the given ID already exists. + :rtype: dict[str, Any] + """ + start = time.time() + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + + request_options["disableAutomaticIdGeneration"] = not kwargs.pop('enable_automatic_id_generation', False) + if populate_query_metrics: + request_options["populateQueryMetrics"] = populate_query_metrics + if pre_trigger_include is not None: + request_options["preTriggerInclude"] = pre_trigger_include + if post_trigger_include is not None: + request_options["postTriggerInclude"] = post_trigger_include + if indexing_directive is not None: + request_options["indexingDirective"] = indexing_directive + + result = await self.client_connection.CreateItemAIO( + database_or_container_link=self.container_link, document=body, options=request_options, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) #what is this doing? can't find function + print(f"Create item took {(time.time() - start) * 1000} ms") + return result + + @distributed_trace + def delete_item( + self, + item, # type: Union[Dict[str, Any], str] + partition_key, # type: Any + populate_query_metrics=None, # type: Optional[bool] + pre_trigger_include=None, # type: Optional[str] + post_trigger_include=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> None + """Delete the specified item from the container. + + If the item does not already exist in the container, an exception is raised. + + :param item: The ID (name) or dict representing item to be deleted. + :param partition_key: Specifies the partition key value for the item. + :param populate_query_metrics: Enable returning query metrics in response headers. + :param pre_trigger_include: trigger id to be used as pre operation trigger. + :param post_trigger_include: trigger id to be used as post operation trigger. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword Callable response_hook: A callable invoked with the response metadata. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The item wasn't deleted successfully. + :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The item does not exist in the container. 
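Unlike its synchronous counterpart, `create_item_aio` above is a coroutine and has to be awaited from async code; a minimal sketch with placeholder names:

    import asyncio

    async def add_item(container):
        # Awaits the coroutine variant introduced in this change.
        return await container.create_item_aio(body={"id": "hero-1", "name": "Ana"})

    # asyncio.run(add_item(container)) once a container proxy is available.
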
+ :rtype: None + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if partition_key is not None: + request_options["partitionKey"] = self._set_partition_key(partition_key) + if populate_query_metrics is not None: + request_options["populateQueryMetrics"] = populate_query_metrics + if pre_trigger_include is not None: + request_options["preTriggerInclude"] = pre_trigger_include + if post_trigger_include is not None: + request_options["postTriggerInclude"] = post_trigger_include + + document_link = self._get_document_link(item) + result = self.client_connection.DeleteItem(document_link=document_link, options=request_options, **kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + + @distributed_trace + def read_offer(self, **kwargs): + # type: (Any) -> Offer + """Read the Offer object for this container. + + If no Offer already exists for the container, an exception is raised. + + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: Offer for the container. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: No offer exists for the container or + the offer could not be retrieved. + :rtype: ~azure.cosmos.Offer + """ + response_hook = kwargs.pop('response_hook', None) + properties = self._get_properties() + link = properties["_self"] + query_spec = { + "query": "SELECT * FROM root r WHERE r.resource=@link", + "parameters": [{"name": "@link", "value": link}], + } + offers = list(self.client_connection.QueryOffers(query_spec, **kwargs)) + if not offers: + raise CosmosResourceNotFoundError( + status_code=StatusCodes.NOT_FOUND, + message="Could not find Offer for container " + self.container_link) + + if response_hook: + response_hook(self.client_connection.last_response_headers, offers) + + return Offer(offer_throughput=offers[0]["content"]["offerThroughput"], properties=offers[0]) + + @distributed_trace + def replace_throughput(self, throughput, **kwargs): + # type: (int, Any) -> Offer + """Replace the container's throughput. + + If no Offer already exists for the container, an exception is raised. + + :param throughput: The throughput to be set (an integer). + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: Offer for the container, updated with new throughput. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: No offer exists for the container + or the offer could not be updated. 
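`delete_item` above needs the item's partition key value so the request can be routed to the right partition; a hedged sketch with placeholder values:

    # Assumes `container` is an existing ContainerProxy; raises
    # CosmosResourceNotFoundError if the item is absent.
    container.delete_item(item="item-1", partition_key="Model-7")
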
+ :rtype: ~azure.cosmos.Offer + """ + response_hook = kwargs.pop('response_hook', None) + properties = self._get_properties() + link = properties["_self"] + query_spec = { + "query": "SELECT * FROM root r WHERE r.resource=@link", + "parameters": [{"name": "@link", "value": link}], + } + offers = list(self.client_connection.QueryOffers(query_spec, **kwargs)) + if not offers: + raise CosmosResourceNotFoundError( + status_code=StatusCodes.NOT_FOUND, + message="Could not find Offer for container " + self.container_link) + new_offer = offers[0].copy() + new_offer["content"]["offerThroughput"] = throughput + data = self.client_connection.ReplaceOffer(offer_link=offers[0]["_self"], offer=offers[0], **kwargs) + + if response_hook: + response_hook(self.client_connection.last_response_headers, data) + + return Offer(offer_throughput=data["content"]["offerThroughput"], properties=data) + + @distributed_trace + def list_conflicts(self, max_item_count=None, **kwargs): + # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + """List all the conflicts in the container. + + :param max_item_count: Max number of items to be returned in the enumeration operation. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: An Iterable of conflicts (dicts). + :rtype: Iterable[dict[str, Any]] + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + + result = self.client_connection.ReadConflicts( + collection_link=self.container_link, feed_options=feed_options, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace + def query_conflicts( + self, + query, # type: str + parameters=None, # type: Optional[List[str]] + enable_cross_partition_query=None, # type: Optional[bool] + partition_key=None, # type: Optional[Any] + max_item_count=None, # type: Optional[int] + **kwargs # type: Any + ): + # type: (...) -> Iterable[Dict[str, Any]] + """Return all conflicts matching a given `query`. + + :param query: The Azure Cosmos DB SQL query to execute. + :param parameters: Optional array of parameters to the query. Ignored if no query is provided. + :param enable_cross_partition_query: Allows sending of more than one request to execute + the query in the Azure Cosmos DB service. + More than one request is necessary if the query is not scoped to single partition key value. + :param partition_key: Specifies the partition key value for the item. + :param max_item_count: Max number of items to be returned in the enumeration operation. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: An Iterable of conflicts (dicts). 
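The container-level `replace_throughput` above builds `new_offer` with the updated `offerThroughput` but then passes `offers[0]` to `ReplaceOffer`, which looks unintended; the sketch below assumes the documented behaviour of persisting the new value:

    # Assumes `container` is an existing ContainerProxy.
    offer = container.replace_throughput(throughput=1000)
    print(offer.offer_throughput)
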
+ :rtype: Iterable[dict[str, Any]] + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + if enable_cross_partition_query is not None: + feed_options["enableCrossPartitionQuery"] = enable_cross_partition_query + if partition_key is not None: + feed_options["partitionKey"] = self._set_partition_key(partition_key) + + result = self.client_connection.QueryConflicts( + collection_link=self.container_link, + query=query if parameters is None else dict(query=query, parameters=parameters), + options=feed_options, + **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace + def get_conflict(self, conflict, partition_key, **kwargs): + # type: (Union[str, Dict[str, Any]], Any, Any) -> Dict[str, str] + """Get the conflict identified by `conflict`. + + :param conflict: The ID (name) or dict representing the conflict to retrieve. + :param partition_key: Partition key for the conflict to retrieve. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: A dict representing the retrieved conflict. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The given conflict couldn't be retrieved. + :rtype: dict[str, Any] + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if partition_key is not None: + request_options["partitionKey"] = self._set_partition_key(partition_key) + + result = self.client_connection.ReadConflict( + conflict_link=self._get_conflict_link(conflict), options=request_options, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace + def delete_conflict(self, conflict, partition_key, **kwargs): + # type: (Union[str, Dict[str, Any]], Any, Any) -> None + """Delete a specified conflict from the container. + + If the conflict does not already exist in the container, an exception is raised. + + :param conflict: The ID (name) or dict representing the conflict to be deleted. + :param partition_key: Partition key for the conflict to delete. + :keyword Callable response_hook: A callable invoked with the response metadata. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The conflict wasn't deleted successfully. + :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The conflict does not exist in the container. 
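`query_conflicts` above takes the same partition-key and cross-partition options as item queries; a hedged sketch with placeholder values:

    # Assumes `container` is an existing ContainerProxy; enumerate conflicts
    # scoped to a single partition key value.
    for conflict in container.query_conflicts(query="SELECT * FROM c", partition_key="Model-7"):
        print(conflict["id"])
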
+ :rtype: None + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if partition_key is not None: + request_options["partitionKey"] = self._set_partition_key(partition_key) + + result = self.client_connection.DeleteConflict( + conflict_link=self._get_conflict_link(conflict), options=request_options, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client_async.py new file mode 100644 index 000000000000..879cafcf7f9f --- /dev/null +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client_async.py @@ -0,0 +1,456 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Create, read, and delete databases in the Azure Cosmos DB SQL API service. 
+""" + +from typing import Any, Dict, Optional, Union, cast, Iterable, List # pylint: disable=unused-import + +import six +from azure.core.tracing.decorator import distributed_trace # type: ignore + +from ._cosmos_client_connection_async import CosmosClientConnection +from .._base import build_options +from .._retry_utility import ConnectionRetryPolicy +from .database_async import DatabaseProxy +from ..documents import ConnectionPolicy, DatabaseAccount +from ..exceptions import CosmosResourceNotFoundError + +__all__ = ("CosmosClient",) + + +def _parse_connection_str(conn_str, credential): + # type: (str, Optional[Any]) -> Dict[str, str] + conn_str = conn_str.rstrip(";") + conn_settings = dict( # type: ignore # pylint: disable=consider-using-dict-comprehension + s.split("=", 1) for s in conn_str.split(";") + ) + if 'AccountEndpoint' not in conn_settings: + raise ValueError("Connection string missing setting 'AccountEndpoint'.") + if not credential and 'AccountKey' not in conn_settings: + raise ValueError("Connection string missing setting 'AccountKey'.") + return conn_settings + + +def _build_auth(credential): + # type: (Any) -> Dict[str, Any] + auth = {} + if isinstance(credential, six.string_types): + auth['masterKey'] = credential + elif isinstance(credential, dict): + if any(k for k in credential.keys() if k in ['masterKey', 'resourceTokens', 'permissionFeed']): + return credential # Backwards compatible + auth['resourceTokens'] = credential # type: ignore + elif hasattr(credential, '__iter__'): + auth['permissionFeed'] = credential + else: + raise TypeError( + "Unrecognized credential type. Please supply the master key as str, " + "or a dictionary or resource tokens, or a list of permissions.") + return auth + + +def _build_connection_policy(kwargs): + # type: (Dict[str, Any]) -> ConnectionPolicy + # pylint: disable=protected-access + policy = kwargs.pop('connection_policy', None) or ConnectionPolicy() + + # Connection config + policy.RequestTimeout = kwargs.pop('request_timeout', None) or \ + kwargs.pop('connection_timeout', None) or \ + policy.RequestTimeout + policy.ConnectionMode = kwargs.pop('connection_mode', None) or policy.ConnectionMode + policy.ProxyConfiguration = kwargs.pop('proxy_config', None) or policy.ProxyConfiguration + policy.EnableEndpointDiscovery = kwargs.pop('enable_endpoint_discovery', None) or policy.EnableEndpointDiscovery + policy.PreferredLocations = kwargs.pop('preferred_locations', None) or policy.PreferredLocations + policy.UseMultipleWriteLocations = kwargs.pop('multiple_write_locations', None) or \ + policy.UseMultipleWriteLocations + + # SSL config + verify = kwargs.pop('connection_verify', None) + policy.DisableSSLVerification = not bool(verify if verify is not None else True) + ssl = kwargs.pop('ssl_config', None) or policy.SSLConfiguration + if ssl: + ssl.SSLCertFile = kwargs.pop('connection_cert', None) or ssl.SSLCertFile + ssl.SSLCaCerts = verify or ssl.SSLCaCerts + policy.SSLConfiguration = ssl + + # Retry config + retry = kwargs.pop('retry_options', None) or policy.RetryOptions + total_retries = kwargs.pop('retry_total', None) + retry._max_retry_attempt_count = total_retries or retry._max_retry_attempt_count + retry._fixed_retry_interval_in_milliseconds = kwargs.pop('retry_fixed_interval', None) or \ + retry._fixed_retry_interval_in_milliseconds + max_backoff = kwargs.pop('retry_backoff_max', None) + retry._max_wait_time_in_seconds = max_backoff or retry._max_wait_time_in_seconds + policy.RetryOptions = retry + connection_retry = 
kwargs.pop('connection_retry_policy', None) or policy.ConnectionRetryConfiguration + if not connection_retry: + connection_retry = ConnectionRetryPolicy( + retry_total=total_retries, + retry_connect=kwargs.pop('retry_connect', None), + retry_read=kwargs.pop('retry_read', None), + retry_status=kwargs.pop('retry_status', None), + retry_backoff_max=max_backoff, + retry_on_status_codes=kwargs.pop('retry_on_status_codes', []), + retry_backoff_factor=kwargs.pop('retry_backoff_factor', 0.8), + ) + policy.ConnectionRetryConfiguration = connection_retry + + return policy + + + +class AsyncCosmosClient(object): + """A client-side logical representation of an Azure Cosmos DB account. + + Use this client to configure and execute requests to the Azure Cosmos DB service. + + :param str url: The URL of the Cosmos DB account. + :param credential: Can be the account key, or a dictionary of resource tokens. + :type credential: str or dict[str, str] + :param str consistency_level: Consistency level to use for the session. The default value is "Session". + :keyword int timeout: An absolute timeout in seconds, for the combined HTTP request and response processing. + :keyword int request_timeout: The HTTP request timeout in milliseconds. + :keyword str connection_mode: The connection mode for the client - currently only supports 'Gateway'. + :keyword proxy_config: Connection proxy configuration. + :paramtype proxy_config: ~azure.cosmos.ProxyConfiguration + :keyword ssl_config: Connection SSL configuration. + :paramtype ssl_config: ~azure.cosmos.SSLConfiguration + :keyword bool connection_verify: Whether to verify the connection, default value is True. + :keyword str connection_cert: An alternative certificate to verify the connection. + :keyword int retry_total: Maximum retry attempts. + :keyword int retry_backoff_max: Maximum retry wait time in seconds. + :keyword int retry_fixed_interval: Fixed retry interval in milliseconds. + :keyword int retry_read: Maximum number of socket read retry attempts. + :keyword int retry_connect: Maximum number of connection error retry attempts. + :keyword int retry_status: Maximum number of retry attempts on error status codes. + :keyword list[int] retry_on_status_codes: A list of specific status codes to retry on. + :keyword float retry_backoff_factor: Factor to calculate wait time between retry attempts. + :keyword bool enable_endpoint_discovery: Enable endpoint discovery for + geo-replicated database accounts. (Default: True) + :keyword list[str] preferred_locations: The preferred locations for geo-replicated database accounts. + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/examples.py + :start-after: [START create_client] + :end-before: [END create_client] + :language: python + :dedent: 0 + :caption: Create a new instance of the Cosmos DB client: + :name: create_client + """ + + def __init__(self, url, credential, consistency_level="Session", **kwargs): + # type: (str, Any, str, Any) -> None + """Instantiate a new CosmosClient.""" + auth = _build_auth(credential) + connection_policy = _build_connection_policy(kwargs) + self.client_connection = CosmosClientConnection( + url, auth=auth, consistency_level=consistency_level, connection_policy=connection_policy, **kwargs + ) + + def __repr__(self): # pylint:disable=client-method-name-no-double-underscore + # type () -> str + return "".format(self.client_connection.url_connection)[:1024] + + def __enter__(self): + self.client_connection.pipeline_client.__enter__() + return self + + def __exit__(self, *args): + return self.client_connection.pipeline_client.__exit__(*args) + + @classmethod + def from_connection_string(cls, conn_str, credential=None, consistency_level="Session", **kwargs): + # type: (str, Optional[Any], str, Any) -> CosmosClient + """Create a CosmosClient instance from a connection string. + + This can be retrieved from the Azure portal.For full list of optional + keyword arguments, see the CosmosClient constructor. + + :param str conn_str: The connection string. + :param credential: Alternative credentials to use instead of the key + provided in the connection string. + :type credential: str or dict(str, str) + :param str consistency_level: + Consistency level to use for the session. The default value is "Session". + """ + settings = _parse_connection_str(conn_str, credential) + return cls( + url=settings['AccountEndpoint'], + credential=credential or settings['AccountKey'], + consistency_level=consistency_level, + **kwargs + ) + + @staticmethod + def _get_database_link(database_or_id): + # type: (Union[DatabaseProxy, str, Dict[str, str]]) -> str + if isinstance(database_or_id, six.string_types): + return "dbs/{}".format(database_or_id) + try: + return cast("DatabaseProxy", database_or_id).database_link + except AttributeError: + pass + database_id = cast("Dict[str, str]", database_or_id)["id"] + return "dbs/{}".format(database_id) + + @distributed_trace + def create_database( # pylint: disable=redefined-builtin + self, + id, # type: str + populate_query_metrics=None, # type: Optional[bool] + offer_throughput=None, # type: Optional[int] + **kwargs # type: Any + ): + # type: (...) -> DatabaseProxy + """ + Create a new database with the given ID (name). + + :param id: ID (name) of the database to create. + :param bool populate_query_metrics: Enable returning query metrics in response headers. + :param int offer_throughput: The provisioned throughput for this offer. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: A DatabaseProxy instance representing the new database. 
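This snapshot declares the class as `AsyncCosmosClient` while `__all__` still exports `CosmosClient`; a hedged construction sketch in which the endpoint and key are placeholders (the emulator endpoint appears purely for illustration):

    # Direct construction with an account key...
    client = AsyncCosmosClient(url="https://localhost:8081", credential="<account-key>")

    # ...or from a connection string; the same optional keyword arguments apply.
    client = AsyncCosmosClient.from_connection_string(
        "AccountEndpoint=https://localhost:8081;AccountKey=<account-key>"
    )
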
+ :rtype: ~azure.cosmos.DatabaseProxy + :raises ~azure.cosmos.exceptions.CosmosResourceExistsError: Database with the given ID already exists. + + .. admonition:: Example: + + .. literalinclude:: ../samples/examples.py + :start-after: [START create_database] + :end-before: [END create_database] + :language: python + :dedent: 0 + :caption: Create a database in the Cosmos DB account: + :name: create_database + """ + + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if populate_query_metrics is not None: + request_options["populateQueryMetrics"] = populate_query_metrics + if offer_throughput is not None: + request_options["offerThroughput"] = offer_throughput + + result = self.client_connection.CreateDatabase(database=dict(id=id), options=request_options, **kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers) + return DatabaseProxy(self.client_connection, id=result["id"], properties=result) + + @distributed_trace + def create_database_if_not_exists( # pylint: disable=redefined-builtin + self, + id, # type: str + populate_query_metrics=None, # type: Optional[bool] + offer_throughput=None, # type: Optional[int] + **kwargs # type: Any + ): + # type: (...) -> DatabaseProxy + """ + Create the database if it does not exist already. + + If the database already exists, the existing settings are returned. + + ..note:: + This function does not check or update existing database settings or + offer throughput if they differ from what is passed in. + + :param id: ID (name) of the database to read or create. + :param bool populate_query_metrics: Enable returning query metrics in response headers. + :param int offer_throughput: The provisioned throughput for this offer. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: A DatabaseProxy instance representing the database. + :rtype: ~azure.cosmos.DatabaseProxy + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The database read or creation failed. + """ + try: + database_proxy = self.get_database_client(id) + database_proxy.read( + populate_query_metrics=populate_query_metrics, + **kwargs + ) + return database_proxy + except CosmosResourceNotFoundError: + return self.create_database( + id, + populate_query_metrics=populate_query_metrics, + offer_throughput=offer_throughput, + **kwargs + ) + + def get_database_client(self, database): + # type: (Union[str, DatabaseProxy, Dict[str, Any]]) -> DatabaseProxy + """Retrieve an existing database with the ID (name) `id`. + + :param database: The ID (name), dict representing the properties or + `DatabaseProxy` instance of the database to read. + :type database: str or dict(str, str) or ~azure.cosmos.DatabaseProxy + :returns: A `DatabaseProxy` instance representing the retrieved database. 
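`create_database_if_not_exists` above first attempts a read and only falls back to `create_database` on `CosmosResourceNotFoundError`, so existing settings win; a hedged sketch with a placeholder name:

    # Throughput is only applied if the database has to be created.
    database = client.create_database_if_not_exists(id="cosmos_demo", offer_throughput=400)
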
+ :rtype: ~azure.cosmos.DatabaseProxy + """ + if isinstance(database, DatabaseProxy): + id_value = database.id + else: + try: + id_value = database["id"] + except TypeError: + id_value = database + + return DatabaseProxy(self.client_connection, id_value) + + @distributed_trace + def list_databases( + self, + max_item_count=None, # type: Optional[int] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any + ): + # type: (...) -> Iterable[Dict[str, Any]] + """List the databases in a Cosmos DB SQL database account. + + :param int max_item_count: Max number of items to be returned in the enumeration operation. + :param bool populate_query_metrics: Enable returning query metrics in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: An Iterable of database properties (dicts). + :rtype: Iterable[dict[str, str]] + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + if populate_query_metrics is not None: + feed_options["populateQueryMetrics"] = populate_query_metrics + + result = self.client_connection.ReadDatabases(options=feed_options, **kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers) + return result + + @distributed_trace + def query_databases( + self, + query=None, # type: Optional[str] + parameters=None, # type: Optional[List[str]] + enable_cross_partition_query=None, # type: Optional[bool] + max_item_count=None, # type: Optional[int] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any + ): + # type: (...) -> Iterable[Dict[str, Any]] + """Query the databases in a Cosmos DB SQL database account. + + :param str query: The Azure Cosmos DB SQL query to execute. + :param list[str] parameters: Optional array of parameters to the query. Ignored if no query is provided. + :param bool enable_cross_partition_query: Allow scan on the queries which couldn't be + served as indexing was opted out on the requested paths. + :param int max_item_count: Max number of items to be returned in the enumeration operation. + :param bool populate_query_metrics: Enable returning query metrics in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: An Iterable of database properties (dicts). + :rtype: Iterable[dict[str, str]] + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if enable_cross_partition_query is not None: + feed_options["enableCrossPartitionQuery"] = enable_cross_partition_query + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + if populate_query_metrics is not None: + feed_options["populateQueryMetrics"] = populate_query_metrics + + if query: + # This is currently eagerly evaluated in order to capture the headers + # from the call. 
+ # (just returning a generator did not initiate the first network call, so + # the headers were misleading) + # This needs to change for "real" implementation + query = query if parameters is None else dict(query=query, parameters=parameters) # type: ignore + result = self.client_connection.QueryDatabases(query=query, options=feed_options, **kwargs) + else: + result = self.client_connection.ReadDatabases(options=feed_options, **kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers) + return result + + @distributed_trace + def delete_database( + self, + database, # type: Union[str, DatabaseProxy, Dict[str, Any]] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any + ): + # type: (...) -> None + """Delete the database with the given ID (name). + + :param database: The ID (name), dict representing the properties or :class:`DatabaseProxy` + instance of the database to delete. + :type database: str or dict(str, str) or ~azure.cosmos.DatabaseProxy + :param bool populate_query_metrics: Enable returning query metrics in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword Callable response_hook: A callable invoked with the response metadata. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the database couldn't be deleted. + :rtype: None + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if populate_query_metrics is not None: + request_options["populateQueryMetrics"] = populate_query_metrics + + database_link = self._get_database_link(database) + self.client_connection.DeleteDatabase(database_link, options=request_options, **kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers) + + @distributed_trace + def get_database_account(self, **kwargs): + # type: (Any) -> DatabaseAccount + """Retrieve the database account information. + + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: A `DatabaseAccount` instance representing the Cosmos DB Database Account. 
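`query_databases` above is evaluated eagerly so that the response headers are captured; a hedged sketch of a parameterized database query with placeholder values:

    for db in client.query_databases(
        query="SELECT * FROM root r WHERE r.id = @id",
        parameters=[{"name": "@id", "value": "cosmos_demo"}],
        enable_cross_partition_query=True,
    ):
        print(db["id"])
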
+ :rtype: ~azure.cosmos.DatabaseAccount + """ + response_hook = kwargs.pop('response_hook', None) + result = self.client_connection.GetDatabaseAccount(**kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers) + return result diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database_async.py new file mode 100644 index 000000000000..cbb1e0ab6902 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database_async.py @@ -0,0 +1,768 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Interact with databases in the Azure Cosmos DB SQL API service. +""" + +from typing import Any, List, Dict, Union, cast, Iterable, Optional + +import warnings +import six +from azure.core.tracing.decorator import distributed_trace # type: ignore + +from ._cosmos_client_connection_async import CosmosClientConnection +from .._base import build_options +from .container_async import ContainerProxy +from ..offer import Offer +from ..http_constants import StatusCodes +from ..exceptions import CosmosResourceNotFoundError +from ..user import UserProxy +from ..documents import IndexingMode + +__all__ = ("DatabaseProxy",) + +# pylint: disable=protected-access +# pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs + + +class DatabaseProxy(object): + """An interface to interact with a specific database. + + This class should not be instantiated directly. Instead use the + :func:`CosmosClient.get_database_client` method. + + A database contains one or more containers, each of which can contain items, + stored procedures, triggers, and user-defined functions. + + A database can also have associated users, each of which is configured with + a set of permissions for accessing certain containers, stored procedures, + triggers, user-defined functions, or items. + + :ivar id: The ID (name) of the database. + + An Azure Cosmos DB SQL API database has the following system-generated + properties. These properties are read-only: + + * `_rid`: The resource ID. + * `_ts`: When the resource was last updated. The value is a timestamp. + * `_self`: The unique addressable URI for the resource. + * `_etag`: The resource etag required for optimistic concurrency control. + * `_colls`: The addressable path of the collections resource. + * `_users`: The addressable path of the users resource. 
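A hedged sketch of reading back the system-generated properties listed above; the database name is a placeholder:

    db = client.get_database_client("cosmos_demo")
    props = db.read()
    # System-generated, read-only fields returned by the service.
    print(props["_rid"], props["_etag"], props["_self"])
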
+ """ + + def __init__(self, client_connection, id, properties=None): # pylint: disable=redefined-builtin + # type: (CosmosClientConnection, str, Dict[str, Any]) -> None + """ + :param ClientSession client_connection: Client from which this database was retrieved. + :param str id: ID (name) of the database. + """ + self.client_connection = client_connection + self.id = id + self.database_link = u"dbs/{}".format(self.id) + self._properties = properties + + def __repr__(self): + # type () -> str + return "".format(self.database_link)[:1024] + + @staticmethod + def _get_container_id(container_or_id): + # type: (Union[str, ContainerProxy, Dict[str, Any]]) -> str + if isinstance(container_or_id, six.string_types): + return container_or_id + try: + return cast("ContainerProxy", container_or_id).id + except AttributeError: + pass + return cast("Dict[str, str]", container_or_id)["id"] + + def _get_container_link(self, container_or_id): + # type: (Union[str, ContainerProxy, Dict[str, Any]]) -> str + return u"{}/colls/{}".format(self.database_link, self._get_container_id(container_or_id)) + + def _get_user_link(self, user_or_id): + # type: (Union[UserProxy, str, Dict[str, Any]]) -> str + if isinstance(user_or_id, six.string_types): + return u"{}/users/{}".format(self.database_link, user_or_id) + try: + return cast("UserProxy", user_or_id).user_link + except AttributeError: + pass + return u"{}/users/{}".format(self.database_link, cast("Dict[str, str]", user_or_id)["id"]) + + def _get_properties(self): + # type: () -> Dict[str, Any] + if self._properties is None: + self._properties = self.read() + return self._properties + + @distributed_trace + def read(self, populate_query_metrics=None, **kwargs): + # type: (Optional[bool], Any) -> Dict[str, Any] + """Read the database properties. + + :param bool populate_query_metrics: Enable returning query metrics in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword Callable response_hook: A callable invoked with the response metadata. + :rtype: Dict[Str, Any] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given database couldn't be retrieved. + """ + # TODO this helper function should be extracted from CosmosClient + from .cosmos_client_async import CosmosClient + + database_link = CosmosClient._get_database_link(self) + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if populate_query_metrics is not None: + request_options["populateQueryMetrics"] = populate_query_metrics + + self._properties = self.client_connection.ReadDatabase( + database_link, options=request_options, **kwargs + ) + + if response_hook: + response_hook(self.client_connection.last_response_headers, self._properties) + + return cast('Dict[str, Any]', self._properties) + + @distributed_trace + async def create_container( + self, + id, # type: str # pylint: disable=redefined-builtin + partition_key, # type: Any + indexing_policy=None, # type: Optional[Dict[str, Any]] + default_ttl=None, # type: Optional[int] + populate_query_metrics=None, # type: Optional[bool] + offer_throughput=None, # type: Optional[int] + unique_key_policy=None, # type: Optional[Dict[str, Any]] + conflict_resolution_policy=None, # type: Optional[Dict[str, Any]] + **kwargs # type: Any + ): + # type: (...) -> ContainerProxy + """Create a new container with the given ID (name). 
+ + If a container with the given ID already exists, a CosmosResourceExistsError is raised. + + :param id: ID (name) of container to create. + :param partition_key: The partition key to use for the container. + :param indexing_policy: The indexing policy to apply to the container. + :param default_ttl: Default time to live (TTL) for items in the container. If unspecified, items do not expire. + :param populate_query_metrics: Enable returning query metrics in response headers. + :param offer_throughput: The provisioned throughput for this offer. + :param unique_key_policy: The unique key policy to apply to the container. + :param conflict_resolution_policy: The conflict resolution policy to apply to the container. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword Callable response_hook: A callable invoked with the response metadata. + :keyword analytical_storage_ttl: Analytical store time to live (TTL) for items in the container. A value of + None leaves analytical storage off and a value of -1 turns analytical storage on with no TTL. Please + note that analytical storage can only be enabled on Synapse Link enabled accounts. + :returns: A `ContainerProxy` instance representing the new container. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The container creation failed. + :rtype: ~azure.cosmos.ContainerProxy + + .. admonition:: Example: + + .. literalinclude:: ../samples/examples.py + :start-after: [START create_container] + :end-before: [END create_container] + :language: python + :dedent: 0 + :caption: Create a container with default settings: + :name: create_container + + .. literalinclude:: ../samples/examples.py + :start-after: [START create_container_with_settings] + :end-before: [END create_container_with_settings] + :language: python + :dedent: 0 + :caption: Create a container with specific settings; in this case, a custom partition key: + :name: create_container_with_settings + """ + definition = dict(id=id) # type: Dict[str, Any] + if partition_key is not None: + definition["partitionKey"] = partition_key + if indexing_policy is not None: + if indexing_policy.get("indexingMode") is IndexingMode.Lazy: + warnings.warn( + "Lazy indexing mode has been deprecated. 
Mode will be set to consistent indexing by the backend.", + DeprecationWarning + ) + definition["indexingPolicy"] = indexing_policy + if default_ttl is not None: + definition["defaultTtl"] = default_ttl + if unique_key_policy is not None: + definition["uniqueKeyPolicy"] = unique_key_policy + if conflict_resolution_policy is not None: + definition["conflictResolutionPolicy"] = conflict_resolution_policy + + analytical_storage_ttl = kwargs.pop("analytical_storage_ttl", None) + if analytical_storage_ttl is not None: + definition["analyticalStorageTtl"] = analytical_storage_ttl + + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if populate_query_metrics is not None: + request_options["populateQueryMetrics"] = populate_query_metrics + if offer_throughput is not None: + request_options["offerThroughput"] = offer_throughput + + data = self.client_connection.CreateContainer( + database_link=self.database_link, collection=definition, options=request_options, **kwargs + ) + + if response_hook: + response_hook(self.client_connection.last_response_headers, data) + + return ContainerProxy(self.client_connection, self.database_link, data["id"], properties=data) + + @distributed_trace + def create_container_if_not_exists( + self, + id, # type: str # pylint: disable=redefined-builtin + partition_key, # type: Any + indexing_policy=None, # type: Optional[Dict[str, Any]] + default_ttl=None, # type: Optional[int] + populate_query_metrics=None, # type: Optional[bool] + offer_throughput=None, # type: Optional[int] + unique_key_policy=None, # type: Optional[Dict[str, Any]] + conflict_resolution_policy=None, # type: Optional[Dict[str, Any]] + **kwargs # type: Any + ): + # type: (...) -> ContainerProxy + """Create a container if it does not exist already. + + If the container already exists, the existing settings are returned. + Note: it does not check or update the existing container settings or offer throughput + if they differ from what was passed into the method. + + :param id: ID (name) of container to read or create. + :param partition_key: The partition key to use for the container. + :param indexing_policy: The indexing policy to apply to the container. + :param default_ttl: Default time to live (TTL) for items in the container. If unspecified, items do not expire. + :param populate_query_metrics: Enable returning query metrics in response headers. + :param offer_throughput: The provisioned throughput for this offer. + :param unique_key_policy: The unique key policy to apply to the container. + :param conflict_resolution_policy: The conflict resolution policy to apply to the container. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword Callable response_hook: A callable invoked with the response metadata. + :keyword analytical_storage_ttl: Analytical store time to live (TTL) for items in the container. A value of + None leaves analytical storage off and a value of -1 turns analytical storage on with no TTL. Please + note that analytical storage can only be enabled on Synapse Link enabled accounts. 
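In this snapshot `create_container` is declared `async`, so it has to be awaited; a hedged sketch using a placeholder container name and partition key path:

    from azure.cosmos import PartitionKey

    async def make_container(database):
        return await database.create_container(
            id="products",
            partition_key=PartitionKey(path="/productModel"),
            default_ttl=3600,
        )
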
+ :returns: A `ContainerProxy` instance representing the container. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The container read or creation failed. + :rtype: ~azure.cosmos.ContainerProxy + """ + + analytical_storage_ttl = kwargs.pop("analytical_storage_ttl", None) + try: + container_proxy = self.get_container_client(id) + container_proxy.read( + populate_query_metrics=populate_query_metrics, + **kwargs + ) + return container_proxy + except CosmosResourceNotFoundError: + return self.create_container( + id=id, + partition_key=partition_key, + indexing_policy=indexing_policy, + default_ttl=default_ttl, + populate_query_metrics=populate_query_metrics, + offer_throughput=offer_throughput, + unique_key_policy=unique_key_policy, + conflict_resolution_policy=conflict_resolution_policy, + analytical_storage_ttl=analytical_storage_ttl + ) + + @distributed_trace + def delete_container( + self, + container, # type: Union[str, ContainerProxy, Dict[str, Any]] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any + ): + # type: (...) -> None + """Delete a container. + + :param container: The ID (name) of the container to delete. You can either + pass in the ID of the container to delete, a :class:`ContainerProxy` instance or + a dict representing the properties of the container. + :param populate_query_metrics: Enable returning query metrics in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword Callable response_hook: A callable invoked with the response metadata. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the container couldn't be deleted. + :rtype: None + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if populate_query_metrics is not None: + request_options["populateQueryMetrics"] = populate_query_metrics + + collection_link = self._get_container_link(container) + result = self.client_connection.DeleteContainer(collection_link, options=request_options, **kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + + def get_container_client(self, container): + # type: (Union[str, ContainerProxy, Dict[str, Any]]) -> ContainerProxy + """Get a `ContainerProxy` for a container with specified ID (name). + + :param container: The ID (name) of the container, a :class:`ContainerProxy` instance, + or a dict representing the properties of the container to be retrieved. + :rtype: ~azure.cosmos.ContainerProxy + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/examples.py + :start-after: [START get_container] + :end-before: [END get_container] + :language: python + :dedent: 0 + :caption: Get an existing container, handling a failure if encountered: + :name: get_container + """ + if isinstance(container, ContainerProxy): + id_value = container.id + else: + try: + id_value = container["id"] + except TypeError: + id_value = container + + return ContainerProxy(self.client_connection, self.database_link, id_value) + + @distributed_trace + def list_containers(self, max_item_count=None, populate_query_metrics=None, **kwargs): + # type: (Optional[int], Optional[bool], Any) -> Iterable[Dict[str, Any]] + """List the containers in the database. + + :param max_item_count: Max number of items to be returned in the enumeration operation. + :param populate_query_metrics: Enable returning query metrics in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: An Iterable of container properties (dicts). + :rtype: Iterable[dict[str, Any]] + + .. admonition:: Example: + + .. literalinclude:: ../samples/examples.py + :start-after: [START list_containers] + :end-before: [END list_containers] + :language: python + :dedent: 0 + :caption: List all containers in the database: + :name: list_containers + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + if populate_query_metrics is not None: + feed_options["populateQueryMetrics"] = populate_query_metrics + + result = self.client_connection.ReadContainers( + database_link=self.database_link, options=feed_options, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return response_hook + + @distributed_trace + def query_containers( + self, + query=None, # type: Optional[str] + parameters=None, # type: Optional[List[str]] + max_item_count=None, # type: Optional[int] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any + ): + # type: (...) -> Iterable[Dict[str, Any]] + """List the properties for containers in the current database. + + :param query: The Azure Cosmos DB SQL query to execute. + :param parameters: Optional array of parameters to the query. Ignored if no query is provided. + :param max_item_count: Max number of items to be returned in the enumeration operation. + :param populate_query_metrics: Enable returning query metrics in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: An Iterable of container properties (dicts). 
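`list_containers` above returns `response_hook` rather than `result`, which looks unintended; the sketch below assumes it yields the iterable of container properties as documented:

    # Assumes `database` is an existing DatabaseProxy.
    for container_props in database.list_containers(max_item_count=10):
        print(container_props["id"])
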
+ :rtype: Iterable[dict[str, Any]] + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + if populate_query_metrics is not None: + feed_options["populateQueryMetrics"] = populate_query_metrics + + result = self.client_connection.QueryContainers( + database_link=self.database_link, + query=query if parameters is None else dict(query=query, parameters=parameters), + options=feed_options, + **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace + def replace_container( + self, + container, # type: Union[str, ContainerProxy, Dict[str, Any]] + partition_key, # type: Any + indexing_policy=None, # type: Optional[Dict[str, Any]] + default_ttl=None, # type: Optional[int] + conflict_resolution_policy=None, # type: Optional[Dict[str, Any]] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any + ): + # type: (...) -> ContainerProxy + """Reset the properties of the container. + + Property changes are persisted immediately. Any properties not specified + will be reset to their default values. + + :param container: The ID (name), dict representing the properties or + :class:`ContainerProxy` instance of the container to be replaced. + :param partition_key: The partition key to use for the container. + :param indexing_policy: The indexing policy to apply to the container. + :param default_ttl: Default time to live (TTL) for items in the container. + If unspecified, items do not expire. + :param conflict_resolution_policy: The conflict resolution policy to apply to the container. + :param populate_query_metrics: Enable returning query metrics in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword Callable response_hook: A callable invoked with the response metadata. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: Raised if the container couldn't be replaced. + This includes if the container with given id does not exist. + :returns: A `ContainerProxy` instance representing the container after replace completed. + :rtype: ~azure.cosmos.ContainerProxy + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/examples.py + :start-after: [START reset_container_properties] + :end-before: [END reset_container_properties] + :language: python + :dedent: 0 + :caption: Reset the TTL property on a container, and display the updated properties: + :name: reset_container_properties + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if populate_query_metrics is not None: + request_options["populateQueryMetrics"] = populate_query_metrics + + container_id = self._get_container_id(container) + container_link = self._get_container_link(container_id) + parameters = { + key: value + for key, value in { + "id": container_id, + "partitionKey": partition_key, + "indexingPolicy": indexing_policy, + "defaultTtl": default_ttl, + "conflictResolutionPolicy": conflict_resolution_policy, + }.items() + if value is not None + } + + container_properties = self.client_connection.ReplaceContainer( + container_link, collection=parameters, options=request_options, **kwargs + ) + + if response_hook: + response_hook(self.client_connection.last_response_headers, container_properties) + + return ContainerProxy( + self.client_connection, self.database_link, container_properties["id"], properties=container_properties + ) + + @distributed_trace + def list_users(self, max_item_count=None, **kwargs): + # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + """List all the users in the container. + + :param max_item_count: Max number of users to be returned in the enumeration operation. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: An Iterable of user properties (dicts). + :rtype: Iterable[dict[str, Any]] + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + + result = self.client_connection.ReadUsers( + database_link=self.database_link, options=feed_options, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace + def query_users(self, query, parameters=None, max_item_count=None, **kwargs): + # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] + """Return all users matching the given `query`. + + :param query: The Azure Cosmos DB SQL query to execute. + :param parameters: Optional array of parameters to the query. Ignored if no query is provided. + :param max_item_count: Max number of users to be returned in the enumeration operation. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: An Iterable of user properties (dicts). + :rtype: Iterable[str, Any] + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + + result = self.client_connection.QueryUsers( + database_link=self.database_link, + query=query if parameters is None else dict(query=query, parameters=parameters), + options=feed_options, + **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + def get_user_client(self, user): + # type: (Union[str, UserProxy, Dict[str, Any]]) -> UserProxy + """Get a `UserProxy` for a user with specified ID. + + :param user: The ID (name), dict representing the properties or :class:`UserProxy` + instance of the user to be retrieved. 
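Because `replace_container` above resets any property that is not passed back to its default, the full desired configuration should be supplied on every call; a hedged sketch that resets the TTL with placeholder values:

    from azure.cosmos import PartitionKey

    replaced = database.replace_container(
        container="products",
        partition_key=PartitionKey(path="/productModel"),
        default_ttl=10,
    )
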
+ def get_user_client(self, user):
+ # type: (Union[str, UserProxy, Dict[str, Any]]) -> UserProxy
+ """Get a `UserProxy` for a user with specified ID.
+
+ :param user: The ID (name), dict representing the properties or :class:`UserProxy`
+ instance of the user to be retrieved.
+ :returns: A `UserProxy` instance representing the retrieved user.
+ :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given user couldn't be retrieved.
+ :rtype: ~azure.cosmos.UserProxy
+ """
+ if isinstance(user, UserProxy):
+ id_value = user.id
+ else:
+ try:
+ id_value = user["id"]
+ except TypeError:
+ id_value = user
+
+ return UserProxy(client_connection=self.client_connection, id=id_value, database_link=self.database_link)
+
+ @distributed_trace
+ def create_user(self, body, **kwargs):
+ # type: (Dict[str, Any], Any) -> UserProxy
+ """Create a new user in the database.
+
+ To update or replace an existing user, use the
+ :func:`DatabaseProxy.upsert_user` method.
+
+ :param body: A dict-like object with an `id` key and value representing the user to be created.
+ The user ID must be unique within the database, and consist of no more than 255 characters.
+ :keyword Callable response_hook: A callable invoked with the response metadata.
+ :returns: A `UserProxy` instance representing the new user.
+ :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given user couldn't be created.
+ :rtype: ~azure.cosmos.UserProxy
+
+ .. admonition:: Example:
+
+ .. literalinclude:: ../samples/examples.py
+ :start-after: [START create_user]
+ :end-before: [END create_user]
+ :language: python
+ :dedent: 0
+ :caption: Create a database user:
+ :name: create_user
+ """
+ request_options = build_options(kwargs)
+ response_hook = kwargs.pop('response_hook', None)
+
+ user = self.client_connection.CreateUser(
+ database_link=self.database_link, user=body, options=request_options, **kwargs)
+
+ if response_hook:
+ response_hook(self.client_connection.last_response_headers, user)
+
+ return UserProxy(
+ client_connection=self.client_connection, id=user["id"], database_link=self.database_link, properties=user
+ )
+
+ @distributed_trace
+ def upsert_user(self, body, **kwargs):
+ # type: (Dict[str, Any], Any) -> UserProxy
+ """Insert or update the specified user.
+
+ If the user already exists in the database, it is replaced. If the user
+ does not already exist, it is inserted.
+
+ :param body: A dict-like object representing the user to update or insert.
+ :keyword Callable response_hook: A callable invoked with the response metadata.
+ :returns: A `UserProxy` instance representing the upserted user.
+ :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given user could not be upserted.
+ :rtype: ~azure.cosmos.UserProxy
+ """
+ request_options = build_options(kwargs)
+ response_hook = kwargs.pop('response_hook', None)
+
+ user = self.client_connection.UpsertUser(
+ database_link=self.database_link, user=body, options=request_options, **kwargs
+ )
+
+ if response_hook:
+ response_hook(self.client_connection.last_response_headers, user)
+
+ return UserProxy(
+ client_connection=self.client_connection, id=user["id"], database_link=self.database_link, properties=user
+ )
+
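A quick illustration of the create/upsert distinction above (editorial sketch, not part of this patch; the account values and the "reader-user" id are placeholders):

from azure.cosmos import CosmosClient
from azure.cosmos.exceptions import CosmosResourceExistsError

client = CosmosClient("https://<account>.documents.azure.com:443/", credential="<master-key>")
database = client.get_database_client("<database-name>")

# upsert_user succeeds whether or not the id already exists ...
user = database.upsert_user(body={"id": "reader-user"})
print(user.id)

# ... while create_user raises on a duplicate id.
try:
    database.create_user(body={"id": "reader-user"})
except CosmosResourceExistsError:
    print("user already exists")
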
+ @distributed_trace
+ def replace_user(
+ self,
+ user, # type: Union[str, UserProxy, Dict[str, Any]]
+ body, # type: Dict[str, Any]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> UserProxy
+ """Replaces the specified user if it exists in the database.
+
+ :param user: The ID (name), dict representing the properties or :class:`UserProxy`
+ instance of the user to be replaced.
+ :param body: A dict-like object representing the user to replace.
+ :keyword Callable response_hook: A callable invoked with the response metadata.
+ :returns: A `UserProxy` instance representing the user after the replace completed.
+ :raises ~azure.cosmos.exceptions.CosmosHttpResponseError:
+ If the replace failed or the user with given ID does not exist.
+ :rtype: ~azure.cosmos.UserProxy
+ """
+ request_options = build_options(kwargs)
+ response_hook = kwargs.pop('response_hook', None)
+
+ replaced_user = self.client_connection.ReplaceUser(
+ user_link=self._get_user_link(user), user=body, options=request_options, **kwargs
+ ) # type: Dict[str, str]
+
+ if response_hook:
+ response_hook(self.client_connection.last_response_headers, replaced_user)
+
+ return UserProxy(
+ client_connection=self.client_connection,
+ id=replaced_user["id"],
+ database_link=self.database_link,
+ properties=replaced_user
+ )
+
+ @distributed_trace
+ def delete_user(self, user, **kwargs):
+ # type: (Union[str, UserProxy, Dict[str, Any]], Any) -> None
+ """Delete the specified user from the database.
+
+ :param user: The ID (name), dict representing the properties or :class:`UserProxy`
+ instance of the user to be deleted.
+ :keyword Callable response_hook: A callable invoked with the response metadata.
+ :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The user wasn't deleted successfully.
+ :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The user does not exist in the database.
+ :rtype: None
+ """
+ request_options = build_options(kwargs)
+ response_hook = kwargs.pop('response_hook', None)
+
+ result = self.client_connection.DeleteUser(
+ user_link=self._get_user_link(user), options=request_options, **kwargs
+ )
+ if response_hook:
+ response_hook(self.client_connection.last_response_headers, result)
+
+ @distributed_trace
+ def read_offer(self, **kwargs):
+ # type: (Any) -> Offer
+ """Read the Offer object for this database.
+
+ :keyword Callable response_hook: A callable invoked with the response metadata.
+ :returns: Offer for the database.
+ :raises ~azure.cosmos.exceptions.CosmosHttpResponseError:
+ If no offer exists for the database or if the offer could not be retrieved.
+ :rtype: ~azure.cosmos.Offer
+ """
+ response_hook = kwargs.pop('response_hook', None)
+ properties = self._get_properties()
+ link = properties["_self"]
+ query_spec = {
+ "query": "SELECT * FROM root r WHERE r.resource=@link",
+ "parameters": [{"name": "@link", "value": link}],
+ }
+ offers = list(self.client_connection.QueryOffers(query_spec, **kwargs))
+ if not offers:
+ raise CosmosResourceNotFoundError(
+ status_code=StatusCodes.NOT_FOUND,
+ message="Could not find Offer for database " + self.database_link)
+
+ if response_hook:
+ response_hook(self.client_connection.last_response_headers, offers)
+
+ return Offer(offer_throughput=offers[0]["content"]["offerThroughput"], properties=offers[0])
+
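A minimal sketch of reading and updating database-level throughput with the two methods around this point (editorial illustration, not part of this patch; it assumes the database was created with dedicated throughput, and the account values are placeholders):

from azure.cosmos import CosmosClient

client = CosmosClient("https://<account>.documents.azure.com:443/", credential="<master-key>")
database = client.get_database_client("<database-name>")

# read_offer raises CosmosResourceNotFoundError when no database-level offer exists.
offer = database.read_offer()
print("current throughput:", offer.offer_throughput)

# Bump the provisioned throughput by 100 RU/s.
new_offer = database.replace_throughput(throughput=offer.offer_throughput + 100)
print("updated throughput:", new_offer.offer_throughput)
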
+ @distributed_trace
+ def replace_throughput(self, throughput, **kwargs):
+ # type: (Optional[int], Any) -> Offer
+ """Replace the database-level throughput.
+
+ :param throughput: The throughput to be set (an integer).
+ :keyword Callable response_hook: A callable invoked with the response metadata.
+ :returns: Offer for the database, updated with new throughput.
+ :raises ~azure.cosmos.exceptions.CosmosHttpResponseError:
+ If no offer exists for the database or if the offer could not be updated.
+ :rtype: ~azure.cosmos.Offer
+ """
+ response_hook = kwargs.pop('response_hook', None)
+ properties = self._get_properties()
+ link = properties["_self"]
+ query_spec = {
+ "query": "SELECT * FROM root r WHERE r.resource=@link",
+ "parameters": [{"name": "@link", "value": link}],
+ }
+ offers = list(self.client_connection.QueryOffers(query_spec))
+ if not offers:
+ raise CosmosResourceNotFoundError(
+ status_code=StatusCodes.NOT_FOUND,
+ message="Could not find Offer for database " + self.database_link)
+ new_offer = offers[0].copy()
+ new_offer["content"]["offerThroughput"] = throughput
+ data = self.client_connection.ReplaceOffer(offer_link=offers[0]["_self"], offer=new_offer, **kwargs)
+ if response_hook:
+ response_hook(self.client_connection.last_response_headers, data)
+ return Offer(offer_throughput=data["content"]["offerThroughput"], properties=data)
diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py
index 7286b2b8d0b4..1e8b9f51d483 100644
--- a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py
+++ b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py
@@ -25,6 +25,8 @@
 from typing import Any, Dict, List, Optional, Union, Iterable, cast # pylint: disable=unused-import
 import six
+import asyncio
+import time
 from azure.core.tracing.decorator import distributed_trace # type: ignore
 from ._cosmos_client_connection import CosmosClientConnection
@@ -496,6 +498,7 @@ def create_item(
 :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: Item with the given ID already exists.
 :rtype: dict[str, Any]
 """
+ start = time.time()
 request_options = build_options(kwargs)
 response_hook = kwargs.pop('response_hook', None)
@@ -514,6 +517,61 @@ def create_item(
 )
 if response_hook:
 response_hook(self.client_connection.last_response_headers, result)
+ print(f"Create item took {(time.time() - start) * 1000} ms")
+ return result
+
+ @distributed_trace
+ async def create_item_aio(
+ self,
+ body, # type: Dict[str, Any]
+ populate_query_metrics=None, # type: Optional[bool]
+ pre_trigger_include=None, # type: Optional[str]
+ post_trigger_include=None, # type: Optional[str]
+ indexing_directive=None, # type: Optional[Any]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Dict[str, str]
+ """Create an item in the container.
+
+ To update or replace an existing item, use the
+ :func:`ContainerProxy.upsert_item` method.
+
+ :param body: A dict-like object representing the item to create.
+ :param populate_query_metrics: Enable returning query metrics in response headers.
+ :param pre_trigger_include: trigger id to be used as pre operation trigger.
+ :param post_trigger_include: trigger id to be used as post operation trigger.
+ :param indexing_directive: Indicate whether the document should be omitted from indexing.
+ :keyword bool enable_automatic_id_generation: Enable automatic id generation if no id present.
+ :keyword str session_token: Token for use with Session consistency.
+ :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request.
+ :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource
+ has changed, and act according to the condition specified by the `match_condition` parameter.
+ :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag.
+ :keyword Callable response_hook: A callable invoked with the response metadata.
+ :returns: A dict representing the new item.
+ :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: Item with the given ID already exists. + :rtype: dict[str, Any] + """ + start = time.time() + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + + request_options["disableAutomaticIdGeneration"] = not kwargs.pop('enable_automatic_id_generation', False) + if populate_query_metrics: + request_options["populateQueryMetrics"] = populate_query_metrics + if pre_trigger_include is not None: + request_options["preTriggerInclude"] = pre_trigger_include + if post_trigger_include is not None: + request_options["postTriggerInclude"] = post_trigger_include + if indexing_directive is not None: + request_options["indexingDirective"] = indexing_directive + + result = await self.client_connection.CreateItemAIO( + database_or_container_link=self.container_link, document=body, options=request_options, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) #what is this doing? can't find function + print(f"Create item took {(time.time() - start) * 1000} ms") return result @distributed_trace diff --git a/sdk/cosmos/azure-cosmos/samples/heroes.py b/sdk/cosmos/azure-cosmos/samples/heroes.py new file mode 100644 index 000000000000..65b4f5ece427 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/samples/heroes.py @@ -0,0 +1,97 @@ +import uuid + +def get_generic_hero(): + hero_item = { + 'id': 'Generic_Hero_' + str(uuid.uuid4()), + 'lastName': 'Smith', + 'parents': None, + 'children': None, + 'address': { + 'state': 'FL', + 'city': 'Miami' + }, + 'saved': ['block'], + 'professional': False, + 'company': None + } + return hero_item + +def get_batman(): + hero_item = { + 'id': 'Batman', + 'lastName': 'Wayne', + 'parents': None, + 'children': None, + 'address': { + 'state': 'WA', + 'city': 'Gotham' + }, + 'saved': ['state', 'city'], + 'professional': True, + 'company': 'DC' + } + return hero_item + +def get_flash(): + hero_item = { + 'id': 'Flash', + 'lastName': 'Allen', + 'parents': None, + 'children': None, + 'address': { + 'state': 'NY', + 'city': 'New York' + }, + 'saved': ['world','country'], + 'professional': True, + 'company': 'DC' + } + return hero_item + +def get_superman(): + hero_item = { + 'id': 'Superman', + 'lastName': 'Kent', + 'parents': None, + 'children': None, + 'address': { + 'state': 'WA', + 'city': 'Metropolis' + }, + 'saved': ['universe','world','country', 'state'], + 'professional': True, + 'company': 'DC' + } + return hero_item + +def get_spider(): + hero_item = { + 'id': 'Spiderman', + 'lastName': 'Parker', + 'parents': None, + 'children': None, + 'address': { + 'state': 'NY', + 'city': 'New York' + }, + 'saved': ['galaxy','world','country'], + 'professional': True, + 'company': 'Marvel' + } + return hero_item + +def get_iron(): + hero_item = { + 'id': 'Ironman', + 'lastName': 'Stark', + 'parents': None, + 'children': None, + 'address': { + 'state': 'NY', + 'city': 'New York' + }, + 'saved': ['galaxy','world','country'], + 'professional': True, + 'company': 'Marvel' + } + return hero_item \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py new file mode 100644 index 000000000000..c26bbc42d81c --- /dev/null +++ b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py @@ -0,0 +1,169 @@ +import sys +sys.path.append(r"C:\Users\simonmoreno\Repos\azure-sdk-for-python\sdk\cosmos\azure-cosmos") + + +from azure.cosmos import container +from azure.core.tracing.decorator import 
distributed_trace +import asyncio +from azure.cosmos import partition_key, cosmos_client +from azure.cosmos.aio.cosmos_client_async import AsyncCosmosClient +import azure.cosmos.exceptions as exceptions +from azure.cosmos.partition_key import PartitionKey +from azure.cosmos.database import DatabaseProxy +from azure.cosmos.aio.database_async import DatabaseProxy + +import config +import heroes + +def get_azure_data(): + endpoint = "https://simonmoreno-sql.documents.azure.com:443/" + key = 'd3KEBamwtPiQpuuyFSlXEOF98cuhL8oqW3jQygmAfTOPImEZPN2yYWFd4IE5pQNdBF70v8I7LldjXB6fimMbrg==' + return [endpoint, key] + +def creation(): + + # + client = AsyncCosmosClient(get_azure_data()[0], get_azure_data()[1]) + # + database_name = 'MockHeroesDatabase' + database = client.create_database_if_not_exists(id=database_name) + # + + container_name = 'mockHeroesContainer' + container = database.create_container_if_not_exists( + id=container_name, + partition_key=PartitionKey(path="/lastName"), + offer_throughput=400 + ) + + real_heroes = [heroes.get_superman(), heroes.get_batman(), heroes.get_flash(), heroes.get_spider(), heroes.get_iron()] + generics = [heroes.get_generic_hero(), heroes.get_generic_hero(), heroes.get_generic_hero()] + + for hero in real_heroes: + container.create_item(body=hero) + + for generic in generics: + container.create_item(body=generic) + + for hero in real_heroes: + response = container.read_item(item=hero['id'], partition_key=hero['lastName']) + request_charge = container.client_connection.last_response_headers['x-ms-request-charge'] #! + if hero['id'] == 'Superman': print(container.client_connection.last_response_headers) + print('Read item with id {0}. Operation consumed {1} request units'.format(response['id'], (request_charge))) + + query = "SELECT * FROM c WHERE c.lastName IN ('Kent', 'Parker')" + + items = list(container.query_items( + query=query, + enable_cross_partition_query=True #! + )) + + request_charge = container.client_connection.last_response_headers['x-ms-request-charge'] #! + print('Query returned {0} items. 
Operation consumed {1} request units'.format(len(items), request_charge)) + +def clean_heroes(): + client = AsyncCosmosClient(get_azure_data()[0], get_azure_data()[1]) + database_name = 'MockHeroesDatabase' + database = client.get_database_client(database_name) + container_name = 'mockHeroesContainer' + container = database.get_container_client(container_name) + real_heroes = [heroes.get_superman(), heroes.get_batman(), heroes.get_flash(), heroes.get_spider(), heroes.get_iron()] + for h in real_heroes: + response = container.delete_item(h['id'], partition_key=h['lastName']) + print(response) + +def destroy(): + client = AsyncCosmosClient(get_azure_data()[0], get_azure_data()[1]) + database_name = 'MockHeroesDatabase' + response = client.delete_database(database_name) + print(f"Database with name {database_name} has been deleted.") + print(response) + +async def createaio(): + client = AsyncCosmosClient(get_azure_data()[0], get_azure_data()[1]) + database_name = 'MockHeroesDatabase' + database = client.create_database_if_not_exists(id=database_name) + + container_name = 'mockHeroesContainer' + container = database.create_container_if_not_exists( + id=container_name, + partition_key=PartitionKey(path="/lastName"), + offer_throughput=400 + ) + + real_heroes = [heroes.get_superman(), heroes.get_batman(), heroes.get_flash(), heroes.get_spider(), heroes.get_iron()] + generics = [heroes.get_generic_hero(), heroes.get_generic_hero(), heroes.get_generic_hero()] + + for hero in real_heroes: + await container.create_item_aio(body=hero) + + # for generic in generics: + # container.create_item_aio(body=generic) + +def get_db(): + client = AsyncCosmosClient(get_azure_data()[0], get_azure_data()[1]) + database_name = 'MockHeroesDatabase' + res = client.get_database_client(database_name).list_containers() + r= client.get_database_client('lols').list_containers() + x = client.get_database_client.three + + print(res) + print("OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO") + print(r) + print("PPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPP") + + # for i in res: + # print(i) + for i in res: + print("CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC") + print(i) + +# asyncio.run(createaio()) + +# creation() + +import uuid + + +endpoint = "https://simonmoreno-sql.documents.azure.com:443/" +key = 'd3KEBamwtPiQpuuyFSlXEOF98cuhL8oqW3jQygmAfTOPImEZPN2yYWFd4IE5pQNdBF70v8I7LldjXB6fimMbrg==' + +def get_test_item(): + async_item = { + 'id': 'Async_' + str(uuid.uuid4()), + 'address': { + 'state': 'WA', + 'city': 'Redmond', + 'street': '1 Microsoft Way' + }, + 'test_object': True + } + return async_item + +def create_test(): + client = cosmos_client.CosmosClient(endpoint, key) + db = client.create_database(id="AsyncDB") + container = db.create_container( + id="AsyncContainer", + partition_key=PartitionKey(path="/id")) + ids = [] + for i in range(20): + body = get_test_item() + print(body.get("id")) + ids.append(body.get("id")) + container.create_item(body=body) + return ids + +async def async_read_test(): + # ids = create_test() + client = AsyncCosmosClient(endpoint, key) + db = client.get_database_client(id="AsyncDB") + container = db.get_container_client(id="AsyncContainer") + print(container.read()) + + + +asyncio.run(async_read_test()) \ No newline at end of file From 15dcceb042c2dabf04004a6f9af27ef83a89a7c7 Mon Sep 17 00:00:00 2001 From: annatisch Date: Fri, 20 Aug 2021 07:01:18 -0700 Subject: [PATCH 02/56] Client Constructor (#20310) * Removed some stuff * Looking at constructors * Updated request * Added client close * working client creation 
Co-authored-by: simorenoh --- .../azure/cosmos/aio/_asynchronous_request.py | 4 +- .../aio/_cosmos_client_connection_async.py | 2466 +---------------- .../aio/_global_endpoint_manager_async.py | 24 +- .../azure/cosmos/aio/_retry_utility.py | 196 ++ .../azure/cosmos/aio/container_async.py | 802 ------ .../azure/cosmos/aio/cosmos_client.py | 163 ++ .../azure/cosmos/aio/cosmos_client_async.py | 456 --- .../azure/cosmos/aio/database_async.py | 768 ----- .../azure/cosmos/cosmos_client.py | 3 + .../azure-cosmos/samples/simon_testfile.py | 31 +- 10 files changed, 395 insertions(+), 4518 deletions(-) create mode 100644 sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility.py delete mode 100644 sdk/cosmos/azure-cosmos/azure/cosmos/aio/container_async.py create mode 100644 sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py delete mode 100644 sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client_async.py delete mode 100644 sdk/cosmos/azure-cosmos/azure/cosmos/aio/database_async.py diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py index 986cb130b9ae..a1afc3f39e93 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py @@ -31,7 +31,7 @@ from .. import exceptions from .. import http_constants -from .. import _retry_utility +from . import _retry_utility from .._synchronized_request import _request_body_from_data @@ -174,7 +174,7 @@ async def AsynchronousRequest( request.headers[http_constants.HttpHeaders.ContentLength] = 0 # Pass _Request function with it's parameters to retry_utility's Execute method that wraps the call with retries - return await _retry_utility.Execute( + return await _retry_utility.ExecuteAsync( client, global_endpoint_manager, _Request, diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py index c23ac5bde52e..645cb0b47e53 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py @@ -58,16 +58,15 @@ from .. import _global_endpoint_manager as global_endpoint_manager from . import _global_endpoint_manager_async as global_endpoint_manager_async from .._routing import routing_map_provider -from .._retry_utility import ConnectionRetryPolicy +from ._retry_utility import ConnectionRetryPolicy from .. import _session from .. import _utils from ..partition_key import _Undefined, _Empty -from .._cosmos_client_connection import CosmosClientConnection as BaseCosmosConnection # pylint: disable=protected-access -class CosmosClientConnection(BaseCosmosConnection, object): # pylint: disable=too-many-public-methods,too-many-instance-attributes +class CosmosClientConnection(object): # pylint: disable=too-many-public-methods,too-many-instance-attributes """Represents a document client. Provides a client-side logical representation of the Azure Cosmos @@ -114,12 +113,6 @@ def __init__( The default consistency policy for client operations. 
""" - # super(CosmosClientConnection, self).__init__( - # url_connection=url_connection, - # auth=auth, - # connection_policy=connection_policy, - # consistency_level=consistency_level, - # **kwargs) self.url_connection = url_connection self.master_key = None @@ -204,12 +197,9 @@ def __init__( HttpLoggingPolicy(**kwargs), ] - # print(asyncio.get_event_loop()) - database_account = self._global_endpoint_manager._GetDatabaseAccount(**kwargs) - self._global_endpoint_manager.force_refresh(database_account) - transport = kwargs.pop("transport", None) self.pipeline_client = AsyncPipelineClient(base_url=url_connection, transport=transport, policies=policies) + self._setup_kwargs = kwargs # Query compatibility mode. # Allows to specify compatibility mode used by client when making query requests. Should be removed when @@ -219,2449 +209,7 @@ def __init__( # Routing map provider self._routing_map_provider = routing_map_provider.SmartRoutingMapProvider(self) - @property - def Session(self): - """Gets the session object from the client. """ - return self.session - - @Session.setter - def Session(self, session): - """Sets a session object on the document client. - - This will override the existing session - """ - self.session = session - - @property - def WriteEndpoint(self): - """Gets the curent write endpoint for a geo-replicated database account. - """ - return self._global_endpoint_manager.get_write_endpoint() - - @property - def ReadEndpoint(self): - """Gets the curent read endpoint for a geo-replicated database account. - """ - return self._global_endpoint_manager.get_read_endpoint() - - def RegisterPartitionResolver(self, database_link, partition_resolver): - """Registers the partition resolver associated with the database link - - :param str database_link: - Database Self Link or ID based link. - :param object partition_resolver: - An instance of PartitionResolver. - - """ - if not database_link: - raise ValueError("database_link is None or empty.") - - if partition_resolver is None: - raise ValueError("partition_resolver is None.") - - self.partition_resolvers = {base.TrimBeginningAndEndingSlashes(database_link): partition_resolver} - - def GetPartitionResolver(self, database_link): - """Gets the partition resolver associated with the database link - - :param str database_link: - Database self link or ID based link. - - :return: - An instance of PartitionResolver. - :rtype: object - - """ - if not database_link: - raise ValueError("database_link is None or empty.") - - return self.partition_resolvers.get(base.TrimBeginningAndEndingSlashes(database_link)) - - async def CreateDatabase(self, database, options=None, **kwargs): - """Creates a database. - - :param dict database: - The Azure Cosmos database to create. - :param dict options: - The request options for the request. - - :return: - The Database that was created. - :rtype: dict - - """ - if options is None: - options = {} - - CosmosClientConnection.__ValidateResource(database) - path = "/dbs" - return await self.Create(database, path, "dbs", None, None, options, **kwargs) - - def ReadDatabase(self, database_link, options=None, **kwargs): - """Reads a database. - - :param str database_link: - The link to the database. - :param dict options: - The request options for the request. - - :return: - The Database that was read. 
- :rtype: dict - - """ - if options is None: - options = {} - - path = base.GetPathFromLink(database_link) - database_id = base.GetResourceIdOrFullNameFromLink(database_link) - return self.Read(path, "dbs", database_id, None, options, **kwargs) - - def ReadDatabases(self, options=None, **kwargs): - """Reads all databases. - - :param dict options: - The request options for the request. - - :return: - Query Iterable of Databases. - :rtype: - query_iterable.QueryIterable - - """ - if options is None: - options = {} - - return self.QueryDatabases(None, options, **kwargs) - - def QueryDatabases(self, query, options=None, **kwargs): - """Queries databases. - - :param (str or dict) query: - :param dict options: - The request options for the request. - - :return: Query Iterable of Databases. - :rtype: - query_iterable.QueryIterable - - """ - if options is None: - options = {} - - def fetch_fn(options): - return ( - self.__QueryFeed( - "/dbs", "dbs", "", lambda r: r["Databases"], - lambda _, b: b, query, options, **kwargs - ), - self.last_response_headers, - ) - - return AsyncItemPaged( - self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable - ) - - def ReadContainers(self, database_link, options=None, **kwargs): - """Reads all collections in a database. - - :param str database_link: - The link to the database. - :param dict options: - The request options for the request. - - :return: Query Iterable of Collections. - :rtype: - query_iterable.QueryIterable - - """ - if options is None: - options = {} - - return self.QueryContainers(database_link, None, options, **kwargs) - - def QueryContainers(self, database_link, query, options=None, **kwargs): - """Queries collections in a database. - - :param str database_link: - The link to the database. - :param (str or dict) query: - :param dict options: - The request options for the request. - - :return: Query Iterable of Collections. - :rtype: - query_iterable.QueryIterable - - """ - if options is None: - options = {} - - path = base.GetPathFromLink(database_link, "colls") - database_id = base.GetResourceIdOrFullNameFromLink(database_link) - - def fetch_fn(options): - return ( - self.__QueryFeed( - path, "colls", database_id, lambda r: r["DocumentCollections"], - lambda _, body: body, query, options, **kwargs - ), - self.last_response_headers, - ) - - return AsyncItemPaged( - self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable - ) - - def CreateContainer(self, database_link, collection, options=None, **kwargs): - """Creates a collection in a database. - - :param str database_link: - The link to the database. - :param dict collection: - The Azure Cosmos collection to create. - :param dict options: - The request options for the request. - - :return: The Collection that was created. - :rtype: dict - - """ - if options is None: - options = {} - - CosmosClientConnection.__ValidateResource(collection) - path = base.GetPathFromLink(database_link, "colls") - database_id = base.GetResourceIdOrFullNameFromLink(database_link) - return self.Create(collection, path, "colls", database_id, None, options, **kwargs) - - def ReplaceContainer(self, collection_link, collection, options=None, **kwargs): - """Replaces a collection and return it. - - :param str collection_link: - The link to the collection entity. - :param dict collection: - The collection to be used. - :param dict options: - The request options for the request. - - :return: - The new Collection. 
- :rtype: - dict - - """ - if options is None: - options = {} - - CosmosClientConnection.__ValidateResource(collection) - path = base.GetPathFromLink(collection_link) - collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) - return self.Replace(collection, path, "colls", collection_id, None, options, **kwargs) - - def ReadContainer(self, collection_link, options=None, **kwargs): - """Reads a collection. - - :param str collection_link: - The link to the document collection. - :param dict options: - The request options for the request. - - :return: - The read Collection. - :rtype: - dict - - """ - if options is None: - options = {} - - path = base.GetPathFromLink(collection_link) - collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) - return self.Read(path, "colls", collection_id, None, options, **kwargs) - - def CreateUser(self, database_link, user, options=None, **kwargs): - """Creates a user. - - :param str database_link: - The link to the database. - :param dict user: - The Azure Cosmos user to create. - :param dict options: - The request options for the request. - - :return: - The created User. - :rtype: - dict - - """ - if options is None: - options = {} - - database_id, path = self._GetDatabaseIdWithPathForUser(database_link, user) - return self.Create(user, path, "users", database_id, None, options, **kwargs) - - def UpsertUser(self, database_link, user, options=None, **kwargs): - """Upserts a user. - - :param str database_link: - The link to the database. - :param dict user: - The Azure Cosmos user to upsert. - :param dict options: - The request options for the request. - - :return: - The upserted User. - :rtype: dict - """ - if options is None: - options = {} - - database_id, path = self._GetDatabaseIdWithPathForUser(database_link, user) - return self.Upsert(user, path, "users", database_id, None, options, **kwargs) - - def _GetDatabaseIdWithPathForUser(self, database_link, user): # pylint: disable=no-self-use - CosmosClientConnection.__ValidateResource(user) - path = base.GetPathFromLink(database_link, "users") - database_id = base.GetResourceIdOrFullNameFromLink(database_link) - return database_id, path - - def ReadUser(self, user_link, options=None, **kwargs): - """Reads a user. - - :param str user_link: - The link to the user entity. - :param dict options: - The request options for the request. - - :return: - The read User. - :rtype: - dict - - """ - if options is None: - options = {} - - path = base.GetPathFromLink(user_link) - user_id = base.GetResourceIdOrFullNameFromLink(user_link) - return self.Read(path, "users", user_id, None, options, **kwargs) - - def ReadUsers(self, database_link, options=None, **kwargs): - """Reads all users in a database. - - :params str database_link: - The link to the database. - :params dict options: - The request options for the request. - :return: - Query iterable of Users. - :rtype: - query_iterable.QueryIterable - - """ - if options is None: - options = {} - - return self.QueryUsers(database_link, None, options, **kwargs) - - def QueryUsers(self, database_link, query, options=None, **kwargs): - """Queries users in a database. - - :param str database_link: - The link to the database. - :param (str or dict) query: - :param dict options: - The request options for the request. - - :return: - Query Iterable of Users. 
- :rtype: - query_iterable.QueryIterable - - """ - if options is None: - options = {} - - path = base.GetPathFromLink(database_link, "users") - database_id = base.GetResourceIdOrFullNameFromLink(database_link) - - def fetch_fn(options): - return ( - self.__QueryFeed( - path, "users", database_id, lambda r: r["Users"], - lambda _, b: b, query, options, **kwargs - ), - self.last_response_headers, - ) - - return AsyncItemPaged( - self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable - ) - - def DeleteDatabase(self, database_link, options=None, **kwargs): - """Deletes a database. - - :param str database_link: - The link to the database. - :param dict options: - The request options for the request. - - :return: - The deleted Database. - :rtype: - dict - - """ - if options is None: - options = {} - - path = base.GetPathFromLink(database_link) - database_id = base.GetResourceIdOrFullNameFromLink(database_link) - return self.DeleteResource(path, "dbs", database_id, None, options, **kwargs) - - def CreatePermission(self, user_link, permission, options=None, **kwargs): - """Creates a permission for a user. - - :param str user_link: - The link to the user entity. - :param dict permission: - The Azure Cosmos user permission to create. - :param dict options: - The request options for the request. - - :return: - The created Permission. - :rtype: - dict - - """ - if options is None: - options = {} - - path, user_id = self._GetUserIdWithPathForPermission(permission, user_link) - return self.Create(permission, path, "permissions", user_id, None, options, **kwargs) - - def UpsertPermission(self, user_link, permission, options=None, **kwargs): - """Upserts a permission for a user. - - :param str user_link: - The link to the user entity. - :param dict permission: - The Azure Cosmos user permission to upsert. - :param dict options: - The request options for the request. - - :return: - The upserted permission. - :rtype: - dict - - """ - if options is None: - options = {} - - path, user_id = self._GetUserIdWithPathForPermission(permission, user_link) - return self.Upsert(permission, path, "permissions", user_id, None, options, **kwargs) - - def _GetUserIdWithPathForPermission(self, permission, user_link): # pylint: disable=no-self-use - CosmosClientConnection.__ValidateResource(permission) - path = base.GetPathFromLink(user_link, "permissions") - user_id = base.GetResourceIdOrFullNameFromLink(user_link) - return path, user_id - - def ReadPermission(self, permission_link, options=None, **kwargs): - """Reads a permission. - - :param str permission_link: - The link to the permission. - :param dict options: - The request options for the request. - - :return: - The read permission. - :rtype: - dict - - """ - if options is None: - options = {} - - path = base.GetPathFromLink(permission_link) - permission_id = base.GetResourceIdOrFullNameFromLink(permission_link) - return self.Read(path, "permissions", permission_id, None, options, **kwargs) - - def ReadPermissions(self, user_link, options=None, **kwargs): - """Reads all permissions for a user. - - :param str user_link: - The link to the user entity. - :param dict options: - The request options for the request. - - :return: - Query Iterable of Permissions. - :rtype: - query_iterable.QueryIterable - - """ - if options is None: - options = {} - - return self.QueryPermissions(user_link, None, options, **kwargs) - - def QueryPermissions(self, user_link, query, options=None, **kwargs): - """Queries permissions for a user. 
- - :param str user_link: - The link to the user entity. - :param (str or dict) query: - :param dict options: - The request options for the request. - - :return: - Query Iterable of Permissions. - :rtype: - query_iterable.QueryIterable - - """ - if options is None: - options = {} - - path = base.GetPathFromLink(user_link, "permissions") - user_id = base.GetResourceIdOrFullNameFromLink(user_link) - - def fetch_fn(options): - return ( - self.__QueryFeed( - path, "permissions", user_id, lambda r: r["Permissions"], lambda _, b: b, query, options, **kwargs - ), - self.last_response_headers, - ) - - return AsyncItemPaged( - self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable - ) - - def ReplaceUser(self, user_link, user, options=None, **kwargs): - """Replaces a user and return it. - - :param str user_link: - The link to the user entity. - :param dict user: - :param dict options: - The request options for the request. - - :return: - The new User. - :rtype: - dict - - """ - if options is None: - options = {} - - CosmosClientConnection.__ValidateResource(user) - path = base.GetPathFromLink(user_link) - user_id = base.GetResourceIdOrFullNameFromLink(user_link) - return self.Replace(user, path, "users", user_id, None, options, **kwargs) - - def DeleteUser(self, user_link, options=None, **kwargs): - """Deletes a user. - - :param str user_link: - The link to the user entity. - :param dict options: - The request options for the request. - - :return: - The deleted user. - :rtype: - dict - - """ - if options is None: - options = {} - - path = base.GetPathFromLink(user_link) - user_id = base.GetResourceIdOrFullNameFromLink(user_link) - return self.DeleteResource(path, "users", user_id, None, options, **kwargs) - - def ReplacePermission(self, permission_link, permission, options=None, **kwargs): - """Replaces a permission and return it. - - :param str permission_link: - The link to the permission. - :param dict permission: - :param dict options: - The request options for the request. - - :return: - The new Permission. - :rtype: - dict - - """ - if options is None: - options = {} - - CosmosClientConnection.__ValidateResource(permission) - path = base.GetPathFromLink(permission_link) - permission_id = base.GetResourceIdOrFullNameFromLink(permission_link) - return self.Replace(permission, path, "permissions", permission_id, None, options, **kwargs) - - def DeletePermission(self, permission_link, options=None, **kwargs): - """Deletes a permission. - - :param str permission_link: - The link to the permission. - :param dict options: - The request options for the request. - - :return: - The deleted Permission. - :rtype: - dict - - """ - if options is None: - options = {} - - path = base.GetPathFromLink(permission_link) - permission_id = base.GetResourceIdOrFullNameFromLink(permission_link) - return self.DeleteResource(path, "permissions", permission_id, None, options, **kwargs) - - def ReadItems(self, collection_link, feed_options=None, response_hook=None, **kwargs): - """Reads all documents in a collection. - - :param str collection_link: - The link to the document collection. - :param dict feed_options: - - :return: - Query Iterable of Documents. 
- :rtype: - query_iterable.QueryIterable - - """ - if feed_options is None: - feed_options = {} - - return self.QueryItems(collection_link, None, feed_options, response_hook=response_hook, **kwargs) - - def QueryItems( - self, - database_or_container_link, - query, - options=None, - partition_key=None, - response_hook=None, - **kwargs - ): - """Queries documents in a collection. - - :param str database_or_container_link: - The link to the database when using partitioning, otherwise link to the document collection. - :param (str or dict) query: - :param dict options: - The request options for the request. - :param str partition_key: - Partition key for the query(default value None) - :param response_hook: - A callable invoked with the response metadata - - :return: - Query Iterable of Documents. - :rtype: - query_iterable.QueryIterable - - """ - database_or_container_link = base.TrimBeginningAndEndingSlashes(database_or_container_link) - - if options is None: - options = {} - - if base.IsDatabaseLink(database_or_container_link): - return AsyncItemPaged( - self, - query, - options, - database_link=database_or_container_link, - partition_key=partition_key, - page_iterator_class=query_iterable.QueryIterable - ) - - path = base.GetPathFromLink(database_or_container_link, "docs") - collection_id = base.GetResourceIdOrFullNameFromLink(database_or_container_link) - - def fetch_fn(options): - return ( - self.__QueryFeed( - path, - "docs", - collection_id, - lambda r: r["Documents"], - lambda _, b: b, - query, - options, - response_hook=response_hook, - **kwargs - ), - self.last_response_headers, - ) - - return AsyncItemPaged( - self, - query, - options, - fetch_function=fetch_fn, - collection_link=database_or_container_link, - page_iterator_class=query_iterable.QueryIterable - ) - - def QueryItemsChangeFeed(self, collection_link, options=None, response_hook=None, **kwargs): - """Queries documents change feed in a collection. - - :param str collection_link: - The link to the document collection. - :param dict options: - The request options for the request. - options may also specify partition key range id. - :param response_hook: - A callable invoked with the response metadata - - :return: - Query Iterable of Documents. - :rtype: - query_iterable.QueryIterable - - """ - - partition_key_range_id = None - if options is not None and "partitionKeyRangeId" in options: - partition_key_range_id = options["partitionKeyRangeId"] - - return self._QueryChangeFeed( - collection_link, "Documents", options, partition_key_range_id, response_hook=response_hook, **kwargs - ) - - def _QueryChangeFeed( - self, collection_link, resource_type, options=None, partition_key_range_id=None, response_hook=None, **kwargs - ): - """Queries change feed of a resource in a collection. - - :param str collection_link: - The link to the document collection. - :param str resource_type: - The type of the resource. - :param dict options: - The request options for the request. - :param str partition_key_range_id: - Specifies partition key range id. - :param response_hook: - A callable invoked with the response metadata - - :return: - Query Iterable of Documents. 
- :rtype: - query_iterable.QueryIterable - - """ - if options is None: - options = {} - options["changeFeed"] = True - - resource_key_map = {"Documents": "docs"} - - # For now, change feed only supports Documents and Partition Key Range resouce type - if resource_type not in resource_key_map: - raise NotImplementedError(resource_type + " change feed query is not supported.") - - resource_key = resource_key_map[resource_type] - path = base.GetPathFromLink(collection_link, resource_key) - collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) - - def fetch_fn(options): - return ( - self.__QueryFeed( - path, - resource_key, - collection_id, - lambda r: r[resource_type], - lambda _, b: b, - None, - options, - partition_key_range_id, - response_hook=response_hook, - **kwargs - ), - self.last_response_headers, - ) - - return AsyncItemPaged( - self, - None, - options, - fetch_function=fetch_fn, - collection_link=collection_link, - page_iterator_class=query_iterable.QueryIterable - ) - - def _ReadPartitionKeyRanges(self, collection_link, feed_options=None, **kwargs): - """Reads Partition Key Ranges. - - :param str collection_link: - The link to the document collection. - :param dict feed_options: - - :return: - Query Iterable of PartitionKeyRanges. - :rtype: - query_iterable.QueryIterable - - """ - if feed_options is None: - feed_options = {} - - return self._QueryPartitionKeyRanges(collection_link, None, feed_options, **kwargs) - - def _QueryPartitionKeyRanges(self, collection_link, query, options=None, **kwargs): - """Queries Partition Key Ranges in a collection. - - :param str collection_link: - The link to the document collection. - :param (str or dict) query: - :param dict options: - The request options for the request. - - :return: - Query Iterable of PartitionKeyRanges. - :rtype: - query_iterable.QueryIterable - - """ - if options is None: - options = {} - - path = base.GetPathFromLink(collection_link, "pkranges") - collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) - - def fetch_fn(options): - return ( - self.__QueryFeed( - path, "pkranges", collection_id, lambda r: r["PartitionKeyRanges"], - lambda _, b: b, query, options, **kwargs - ), - self.last_response_headers, - ) - - return AsyncItemPaged( - self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable - ) - - def CreateItem(self, database_or_container_link, document, options=None, **kwargs): - """Creates a document in a collection. - - :param str database_or_container_link: - The link to the database when using partitioning, otherwise link to the document collection. - :param dict document: - The Azure Cosmos document to create. - :param dict options: - The request options for the request. - :param bool options['disableAutomaticIdGeneration']: - Disables the automatic id generation. If id is missing in the body and this - option is true, an error will be returned. - - :return: - The created Document. - :rtype: - dict - - """ - # Python's default arguments are evaluated once when the function is defined, - # not each time the function is called (like it is in say, Ruby). This means - # that if you use a mutable default argument and mutate it, you will and have - # mutated that object for all future calls to the function as well. 
So, using - # a non-mutable default in this case(None) and assigning an empty dict(mutable) - # inside the method For more details on this gotcha, please refer - # http://docs.python-guide.org/en/latest/writing/gotchas/ - if options is None: - options = {} - - # We check the link to be document collection link since it can be database - # link in case of client side partitioning - collection_id, document, path = self._GetContainerIdWithPathForItem( - database_or_container_link, document, options - ) - - if base.IsItemContainerLink(database_or_container_link): - options = self._AddPartitionKey(database_or_container_link, document, options) - - return self.Create(document, path, "docs", collection_id, None, options, **kwargs) - - def UpsertItem(self, database_or_container_link, document, options=None, **kwargs): - """Upserts a document in a collection. - - :param str database_or_container_link: - The link to the database when using partitioning, otherwise link to the document collection. - :param dict document: - The Azure Cosmos document to upsert. - :param dict options: - The request options for the request. - :param bool options['disableAutomaticIdGeneration']: - Disables the automatic id generation. If id is missing in the body and this - option is true, an error will be returned. - - :return: - The upserted Document. - :rtype: - dict - - """ - # Python's default arguments are evaluated once when the function is defined, - # not each time the function is called (like it is in say, Ruby). This means - # that if you use a mutable default argument and mutate it, you will and have - # mutated that object for all future calls to the function as well. So, using - # a non-mutable deafult in this case(None) and assigning an empty dict(mutable) - # inside the method For more details on this gotcha, please refer - # http://docs.python-guide.org/en/latest/writing/gotchas/ - if options is None: - options = {} - - # We check the link to be document collection link since it can be database - # link in case of client side partitioning - if base.IsItemContainerLink(database_or_container_link): - options = self._AddPartitionKey(database_or_container_link, document, options) - - collection_id, document, path = self._GetContainerIdWithPathForItem( - database_or_container_link, document, options - ) - return self.Upsert(document, path, "docs", collection_id, None, options, **kwargs) - - PartitionResolverErrorMessage = ( - "Couldn't find any partition resolvers for the database link provided. " - + "Ensure that the link you used when registering the partition resolvers " - + "matches the link provided or you need to register both types of database " - + "link(self link as well as ID based link)." 
- ) - - # Gets the collection id and path for the document - def _GetContainerIdWithPathForItem(self, database_or_container_link, document, options): - - if not database_or_container_link: - raise ValueError("database_or_container_link is None or empty.") - - if document is None: - raise ValueError("document is None.") - - CosmosClientConnection.__ValidateResource(document) - document = document.copy() - if not document.get("id") and not options.get("disableAutomaticIdGeneration"): - document["id"] = base.GenerateGuidId() - - collection_link = database_or_container_link - - if base.IsDatabaseLink(database_or_container_link): - partition_resolver = self.GetPartitionResolver(database_or_container_link) - - if partition_resolver is not None: - collection_link = partition_resolver.ResolveForCreate(document) - else: - raise ValueError(CosmosClientConnection.PartitionResolverErrorMessage) - - path = base.GetPathFromLink(collection_link, "docs") - collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) - return collection_id, document, path - - def ReadItem(self, document_link, options=None, **kwargs): - """Reads a document. - - :param str document_link: - The link to the document. - :param dict options: - The request options for the request. - - :return: - The read Document. - :rtype: - dict - - """ - if options is None: - options = {} - - path = base.GetPathFromLink(document_link) - document_id = base.GetResourceIdOrFullNameFromLink(document_link) - return self.Read(path, "docs", document_id, None, options, **kwargs) - - async def ReadItemAsync(self, document_link, options=None, **kwargs): - """Reads a document. - - :param str document_link: - The link to the document. - :param dict options: - The request options for the request. - - :return: - The read Document. - :rtype: - dict - - """ - if options is None: - options = {} - - path = base.GetPathFromLink(document_link) - document_id = base.GetResourceIdOrFullNameFromLink(document_link) - return await self.ReadAsync(path, "docs", document_id, None, options, **kwargs) - - def ReadTriggers(self, collection_link, options=None, **kwargs): - """Reads all triggers in a collection. - - :param str collection_link: - The link to the document collection. - :param dict options: - The request options for the request. - - :return: - Query Iterable of Triggers. - :rtype: - query_iterable.QueryIterable - - """ - if options is None: - options = {} - - return self.QueryTriggers(collection_link, None, options, **kwargs) - - def QueryTriggers(self, collection_link, query, options=None, **kwargs): - """Queries triggers in a collection. - - :param str collection_link: - The link to the document collection. - :param (str or dict) query: - :param dict options: - The request options for the request. - - :return: - Query Iterable of Triggers. - :rtype: - query_iterable.QueryIterable - - """ - if options is None: - options = {} - - path = base.GetPathFromLink(collection_link, "triggers") - collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) - - def fetch_fn(options): - return ( - self.__QueryFeed( - path, "triggers", collection_id, lambda r: r["Triggers"], lambda _, b: b, query, options, **kwargs - ), - self.last_response_headers, - ) - - return AsyncItemPaged( - self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable - ) - - def CreateTrigger(self, collection_link, trigger, options=None, **kwargs): - """Creates a trigger in a collection. 
- - :param str collection_link: - The link to the document collection. - :param dict trigger: - :param dict options: - The request options for the request. - - :return: - The created Trigger. - :rtype: - dict - - """ - if options is None: - options = {} - - collection_id, path, trigger = self._GetContainerIdWithPathForTrigger(collection_link, trigger) - return self.Create(trigger, path, "triggers", collection_id, None, options, **kwargs) - - def UpsertTrigger(self, collection_link, trigger, options=None, **kwargs): - """Upserts a trigger in a collection. - - :param str collection_link: - The link to the document collection. - :param dict trigger: - :param dict options: - The request options for the request. - - :return: - The upserted Trigger. - :rtype: - dict - - """ - if options is None: - options = {} - - collection_id, path, trigger = self._GetContainerIdWithPathForTrigger(collection_link, trigger) - return self.Upsert(trigger, path, "triggers", collection_id, None, options, **kwargs) - - def _GetContainerIdWithPathForTrigger(self, collection_link, trigger): # pylint: disable=no-self-use - CosmosClientConnection.__ValidateResource(trigger) - trigger = trigger.copy() - if trigger.get("serverScript"): - trigger["body"] = str(trigger.pop("serverScript", "")) - elif trigger.get("body"): - trigger["body"] = str(trigger["body"]) - - path = base.GetPathFromLink(collection_link, "triggers") - collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) - return collection_id, path, trigger - - def ReadTrigger(self, trigger_link, options=None, **kwargs): - """Reads a trigger. - - :param str trigger_link: - The link to the trigger. - :param dict options: - The request options for the request. - - :return: - The read Trigger. - :rtype: - dict - - """ - if options is None: - options = {} - - path = base.GetPathFromLink(trigger_link) - trigger_id = base.GetResourceIdOrFullNameFromLink(trigger_link) - return self.Read(path, "triggers", trigger_id, None, options, **kwargs) - - def ReadUserDefinedFunctions(self, collection_link, options=None, **kwargs): - """Reads all user-defined functions in a collection. - - :param str collection_link: - The link to the document collection. - :param dict options: - The request options for the request. - - :return: - Query Iterable of UDFs. - :rtype: - query_iterable.QueryIterable - - """ - if options is None: - options = {} - - return self.QueryUserDefinedFunctions(collection_link, None, options, **kwargs) - - def QueryUserDefinedFunctions(self, collection_link, query, options=None, **kwargs): - """Queries user-defined functions in a collection. - - :param str collection_link: - The link to the collection. - :param (str or dict) query: - :param dict options: - The request options for the request. - - :return: - Query Iterable of UDFs. - :rtype: - query_iterable.QueryIterable - - """ - if options is None: - options = {} - - path = base.GetPathFromLink(collection_link, "udfs") - collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) - - def fetch_fn(options): - return ( - self.__QueryFeed( - path, "udfs", collection_id, lambda r: r["UserDefinedFunctions"], - lambda _, b: b, query, options, **kwargs - ), - self.last_response_headers, - ) - - return AsyncItemPaged( - self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable - ) - - def CreateUserDefinedFunction(self, collection_link, udf, options=None, **kwargs): - """Creates a user-defined function in a collection. 
- - :param str collection_link: - The link to the collection. - :param str udf: - :param dict options: - The request options for the request. - - :return: - The created UDF. - :rtype: - dict - - """ - if options is None: - options = {} - - collection_id, path, udf = self._GetContainerIdWithPathForUDF(collection_link, udf) - return self.Create(udf, path, "udfs", collection_id, None, options, **kwargs) - - def UpsertUserDefinedFunction(self, collection_link, udf, options=None, **kwargs): - """Upserts a user-defined function in a collection. - - :param str collection_link: - The link to the collection. - :param str udf: - :param dict options: - The request options for the request. - - :return: - The upserted UDF. - :rtype: - dict - - """ - if options is None: - options = {} - - collection_id, path, udf = self._GetContainerIdWithPathForUDF(collection_link, udf) - return self.Upsert(udf, path, "udfs", collection_id, None, options, **kwargs) - - def _GetContainerIdWithPathForUDF(self, collection_link, udf): # pylint: disable=no-self-use - CosmosClientConnection.__ValidateResource(udf) - udf = udf.copy() - if udf.get("serverScript"): - udf["body"] = str(udf.pop("serverScript", "")) - elif udf.get("body"): - udf["body"] = str(udf["body"]) - - path = base.GetPathFromLink(collection_link, "udfs") - collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) - return collection_id, path, udf - - def ReadUserDefinedFunction(self, udf_link, options=None, **kwargs): - """Reads a user-defined function. - - :param str udf_link: - The link to the user-defined function. - :param dict options: - The request options for the request. - - :return: - The read UDF. - :rtype: - dict - - """ - if options is None: - options = {} - - path = base.GetPathFromLink(udf_link) - udf_id = base.GetResourceIdOrFullNameFromLink(udf_link) - return self.Read(path, "udfs", udf_id, None, options, **kwargs) - - def ReadStoredProcedures(self, collection_link, options=None, **kwargs): - """Reads all store procedures in a collection. - - :param str collection_link: - The link to the document collection. - :param dict options: - The request options for the request. - - :return: - Query Iterable of Stored Procedures. - :rtype: - query_iterable.QueryIterable - - """ - if options is None: - options = {} - - return self.QueryStoredProcedures(collection_link, None, options, **kwargs) - - def QueryStoredProcedures(self, collection_link, query, options=None, **kwargs): - """Queries stored procedures in a collection. - - :param str collection_link: - The link to the document collection. - :param (str or dict) query: - :param dict options: - The request options for the request. - - :return: - Query Iterable of Stored Procedures. - :rtype: - query_iterable.QueryIterable - - """ - if options is None: - options = {} - - path = base.GetPathFromLink(collection_link, "sprocs") - collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) - - def fetch_fn(options): - return ( - self.__QueryFeed( - path, "sprocs", collection_id, lambda r: r["StoredProcedures"], - lambda _, b: b, query, options, **kwargs - ), - self.last_response_headers, - ) - - return AsyncItemPaged( - self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable - ) - - def CreateStoredProcedure(self, collection_link, sproc, options=None, **kwargs): - """Creates a stored procedure in a collection. - - :param str collection_link: - The link to the document collection. 
- :param str sproc: - :param dict options: - The request options for the request. - - :return: - The created Stored Procedure. - :rtype: - dict - - """ - if options is None: - options = {} - - collection_id, path, sproc = self._GetContainerIdWithPathForSproc(collection_link, sproc) - return self.Create(sproc, path, "sprocs", collection_id, None, options, **kwargs) - - def UpsertStoredProcedure(self, collection_link, sproc, options=None, **kwargs): - """Upserts a stored procedure in a collection. - - :param str collection_link: - The link to the document collection. - :param str sproc: - :param dict options: - The request options for the request. - - :return: - The upserted Stored Procedure. - :rtype: - dict - - """ - if options is None: - options = {} - - collection_id, path, sproc = self._GetContainerIdWithPathForSproc(collection_link, sproc) - return self.Upsert(sproc, path, "sprocs", collection_id, None, options, **kwargs) - - def _GetContainerIdWithPathForSproc(self, collection_link, sproc): # pylint: disable=no-self-use - CosmosClientConnection.__ValidateResource(sproc) - sproc = sproc.copy() - if sproc.get("serverScript"): - sproc["body"] = str(sproc.pop("serverScript", "")) - elif sproc.get("body"): - sproc["body"] = str(sproc["body"]) - path = base.GetPathFromLink(collection_link, "sprocs") - collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) - return collection_id, path, sproc - - def ReadStoredProcedure(self, sproc_link, options=None, **kwargs): - """Reads a stored procedure. - - :param str sproc_link: - The link to the stored procedure. - :param dict options: - The request options for the request. - - :return: - The read Stored Procedure. - :rtype: - dict - - """ - if options is None: - options = {} - - path = base.GetPathFromLink(sproc_link) - sproc_id = base.GetResourceIdOrFullNameFromLink(sproc_link) - return self.Read(path, "sprocs", sproc_id, None, options, **kwargs) - - def ReadConflicts(self, collection_link, feed_options=None, **kwargs): - """Reads conflicts. - - :param str collection_link: - The link to the document collection. - :param dict feed_options: - - :return: - Query Iterable of Conflicts. - :rtype: - query_iterable.QueryIterable - - """ - if feed_options is None: - feed_options = {} - - return self.QueryConflicts(collection_link, None, feed_options, **kwargs) - - def QueryConflicts(self, collection_link, query, options=None, **kwargs): - """Queries conflicts in a collection. - - :param str collection_link: - The link to the document collection. - :param (str or dict) query: - :param dict options: - The request options for the request. - - :return: - Query Iterable of Conflicts. - :rtype: - query_iterable.QueryIterable - - """ - if options is None: - options = {} - - path = base.GetPathFromLink(collection_link, "conflicts") - collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) - - def fetch_fn(options): - return ( - self.__QueryFeed( - path, "conflicts", collection_id, lambda r: r["Conflicts"], - lambda _, b: b, query, options, **kwargs - ), - self.last_response_headers, - ) - - return AsyncItemPaged( - self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable - ) - - def ReadConflict(self, conflict_link, options=None, **kwargs): - """Reads a conflict. - - :param str conflict_link: - The link to the conflict. - :param dict options: - - :return: - The read Conflict. 
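A minimal usage sketch of the connection-level stored procedure API above; the connection instance, database/collection names, and the JavaScript body are illustrative placeholders, not part of this change, and the call shape may still need awaiting as the async conversion progresses.

    # Hypothetical names: `connection` is an existing CosmosClientConnection.
    new_sproc = {
        "id": "helloWorld",
        # "serverScript" is also accepted and is normalized into "body" as shown above.
        "body": "function () { getContext().getResponse().setBody('Hello'); }",
    }
    created = connection.CreateStoredProcedure("dbs/mydb/colls/mycoll", new_sproc)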
- :rtype: - dict - - """ - if options is None: - options = {} - - path = base.GetPathFromLink(conflict_link) - conflict_id = base.GetResourceIdOrFullNameFromLink(conflict_link) - return self.Read(path, "conflicts", conflict_id, None, options, **kwargs) - - def DeleteContainer(self, collection_link, options=None, **kwargs): - """Deletes a collection. - - :param str collection_link: - The link to the document collection. - :param dict options: - The request options for the request. - - :return: - The deleted Collection. - :rtype: - dict - - """ - if options is None: - options = {} - - path = base.GetPathFromLink(collection_link) - collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) - return self.DeleteResource(path, "colls", collection_id, None, options, **kwargs) - - def ReplaceItem(self, document_link, new_document, options=None, **kwargs): - """Replaces a document and returns it. - - :param str document_link: - The link to the document. - :param dict new_document: - :param dict options: - The request options for the request. - - :return: - The new Document. - :rtype: - dict - - """ - CosmosClientConnection.__ValidateResource(new_document) - path = base.GetPathFromLink(document_link) - document_id = base.GetResourceIdOrFullNameFromLink(document_link) - - # Python's default arguments are evaluated once when the function is defined, - # not each time the function is called (like it is in say, Ruby). This means - # that if you use a mutable default argument and mutate it, you will and have - # mutated that object for all future calls to the function as well. So, using - # a non-mutable deafult in this case(None) and assigning an empty dict(mutable) - # inside the function so that it remains local For more details on this gotcha, - # please refer http://docs.python-guide.org/en/latest/writing/gotchas/ - if options is None: - options = {} - - # Extract the document collection link and add the partition key to options - collection_link = base.GetItemContainerLink(document_link) - options = self._AddPartitionKey(collection_link, new_document, options) - - return self.Replace(new_document, path, "docs", document_id, None, options, **kwargs) - - def DeleteItem(self, document_link, options=None, **kwargs): - """Deletes a document. - - :param str document_link: - The link to the document. - :param dict options: - The request options for the request. - - :return: - The deleted Document. - :rtype: - dict - - """ - if options is None: - options = {} - - path = base.GetPathFromLink(document_link) - document_id = base.GetResourceIdOrFullNameFromLink(document_link) - return self.DeleteResource(path, "docs", document_id, None, options, **kwargs) - - def ReplaceTrigger(self, trigger_link, trigger, options=None, **kwargs): - """Replaces a trigger and returns it. - - :param str trigger_link: - The link to the trigger. - :param dict trigger: - :param dict options: - The request options for the request. - - :return: - The replaced Trigger. 
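A stand-alone sketch of the Python default-argument gotcha referenced in the comment above: a mutable default is created once at function definition time and shared across every call, which is why these methods default `options` to None and assign a fresh dict inside the function.

    def bad(options={}):          # single dict shared by every call
        options["hit"] = options.get("hit", 0) + 1
        return options

    def good(options=None):       # the pattern used throughout this client
        if options is None:
            options = {}
        options["hit"] = options.get("hit", 0) + 1
        return options

    print(bad(), bad())    # the same dict is mutated twice: {'hit': 2} {'hit': 2}
    print(good(), good())  # independent dicts: {'hit': 1} {'hit': 1}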
- :rtype: - dict - - """ - if options is None: - options = {} - - CosmosClientConnection.__ValidateResource(trigger) - trigger = trigger.copy() - if trigger.get("serverScript"): - trigger["body"] = str(trigger["serverScript"]) - elif trigger.get("body"): - trigger["body"] = str(trigger["body"]) - - path = base.GetPathFromLink(trigger_link) - trigger_id = base.GetResourceIdOrFullNameFromLink(trigger_link) - return self.Replace(trigger, path, "triggers", trigger_id, None, options, **kwargs) - - def DeleteTrigger(self, trigger_link, options=None, **kwargs): - """Deletes a trigger. - - :param str trigger_link: - The link to the trigger. - :param dict options: - The request options for the request. - - :return: - The deleted Trigger. - :rtype: - dict - - """ - if options is None: - options = {} - - path = base.GetPathFromLink(trigger_link) - trigger_id = base.GetResourceIdOrFullNameFromLink(trigger_link) - return self.DeleteResource(path, "triggers", trigger_id, None, options, **kwargs) - - def ReplaceUserDefinedFunction(self, udf_link, udf, options=None, **kwargs): - """Replaces a user-defined function and returns it. - - :param str udf_link: - The link to the user-defined function. - :param dict udf: - :param dict options: - The request options for the request. - - :return: - The new UDF. - :rtype: - dict - - """ - if options is None: - options = {} - - CosmosClientConnection.__ValidateResource(udf) - udf = udf.copy() - if udf.get("serverScript"): - udf["body"] = str(udf["serverScript"]) - elif udf.get("body"): - udf["body"] = str(udf["body"]) - - path = base.GetPathFromLink(udf_link) - udf_id = base.GetResourceIdOrFullNameFromLink(udf_link) - return self.Replace(udf, path, "udfs", udf_id, None, options, **kwargs) - - def DeleteUserDefinedFunction(self, udf_link, options=None, **kwargs): - """Deletes a user-defined function. - - :param str udf_link: - The link to the user-defined function. - :param dict options: - The request options for the request. - - :return: - The deleted UDF. - :rtype: - dict - - """ - if options is None: - options = {} - - path = base.GetPathFromLink(udf_link) - udf_id = base.GetResourceIdOrFullNameFromLink(udf_link) - return self.DeleteResource(path, "udfs", udf_id, None, options, **kwargs) - - def ExecuteStoredProcedure(self, sproc_link, params, options=None, **kwargs): - """Executes a store procedure. - - :param str sproc_link: - The link to the stored procedure. - :param dict params: - List or None - :param dict options: - The request options for the request. - - :return: - The Stored Procedure response. - :rtype: - dict - - """ - if options is None: - options = {} - - initial_headers = dict(self.default_headers) - initial_headers.update({http_constants.HttpHeaders.Accept: (runtime_constants.MediaTypes.Json)}) - - if params and not isinstance(params, list): - params = [params] - - path = base.GetPathFromLink(sproc_link) - sproc_id = base.GetResourceIdOrFullNameFromLink(sproc_link) - headers = base.GetHeaders(self, initial_headers, "post", path, sproc_id, "sprocs", options) - - # ExecuteStoredProcedure will use WriteEndpoint since it uses POST operation - request_params = _request_object.RequestObject("sprocs", documents._OperationType.ExecuteJavaScript) - result, self.last_response_headers = self.__Post(path, request_params, params, headers, **kwargs) - return result - - def ReplaceStoredProcedure(self, sproc_link, sproc, options=None, **kwargs): - """Replaces a stored procedure and returns it. - - :param str sproc_link: - The link to the stored procedure. 
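A hedged sketch of the call shape for ExecuteStoredProcedure above; the stored procedure link, parameters, and partition key below are placeholder values rather than anything taken from this change.

    # Hypothetical names: `connection` is an existing CosmosClientConnection.
    result = connection.ExecuteStoredProcedure(
        "dbs/mydb/colls/mycoll/sprocs/helloWorld",
        params=[{"id": "item1"}],            # a single dict is also accepted and wrapped in a list
        options={"partitionKey": "item1"},
    )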
- :param dict sproc: - :param dict options: - The request options for the request. - - :return: - The replaced Stored Procedure. - :rtype: - dict - - """ - if options is None: - options = {} - - CosmosClientConnection.__ValidateResource(sproc) - sproc = sproc.copy() - if sproc.get("serverScript"): - sproc["body"] = str(sproc["serverScript"]) - elif sproc.get("body"): - sproc["body"] = str(sproc["body"]) - - path = base.GetPathFromLink(sproc_link) - sproc_id = base.GetResourceIdOrFullNameFromLink(sproc_link) - return self.Replace(sproc, path, "sprocs", sproc_id, None, options, **kwargs) - - def DeleteStoredProcedure(self, sproc_link, options=None, **kwargs): - """Deletes a stored procedure. - - :param str sproc_link: - The link to the stored procedure. - :param dict options: - The request options for the request. - - :return: - The deleted Stored Procedure. - :rtype: - dict - - """ - if options is None: - options = {} - - path = base.GetPathFromLink(sproc_link) - sproc_id = base.GetResourceIdOrFullNameFromLink(sproc_link) - return self.DeleteResource(path, "sprocs", sproc_id, None, options, **kwargs) - - def DeleteConflict(self, conflict_link, options=None, **kwargs): - """Deletes a conflict. - - :param str conflict_link: - The link to the conflict. - :param dict options: - The request options for the request. - - :return: - The deleted Conflict. - :rtype: - dict - - """ - if options is None: - options = {} - - path = base.GetPathFromLink(conflict_link) - conflict_id = base.GetResourceIdOrFullNameFromLink(conflict_link) - return self.DeleteResource(path, "conflicts", conflict_id, None, options, **kwargs) - - def ReplaceOffer(self, offer_link, offer, **kwargs): - """Replaces an offer and returns it. - - :param str offer_link: - The link to the offer. - :param dict offer: - - :return: - The replaced Offer. - :rtype: - dict - - """ - CosmosClientConnection.__ValidateResource(offer) - path = base.GetPathFromLink(offer_link) - offer_id = base.GetResourceIdOrFullNameFromLink(offer_link) - return self.Replace(offer, path, "offers", offer_id, None, None, **kwargs) - - def ReadOffer(self, offer_link, **kwargs): - """Reads an offer. - - :param str offer_link: - The link to the offer. - - :return: - The read Offer. - :rtype: - dict - - """ - path = base.GetPathFromLink(offer_link) - offer_id = base.GetResourceIdOrFullNameFromLink(offer_link) - return self.Read(path, "offers", offer_id, None, {}, **kwargs) - - def ReadOffers(self, options=None, **kwargs): - """Reads all offers. - - :param dict options: - The request options for the request - - :return: - Query Iterable of Offers. - :rtype: - query_iterable.QueryIterable - - """ - if options is None: - options = {} - - return self.QueryOffers(None, options, **kwargs) - - def QueryOffers(self, query, options=None, **kwargs): - """Query for all offers. - - :param (str or dict) query: - :param dict options: - The request options for the request - - :return: - Query Iterable of Offers. - :rtype: - query_iterable.QueryIterable - - """ - if options is None: - options = {} - - def fetch_fn(options): - return ( - self.__QueryFeed( - "/offers", "offers", "", lambda r: r["Offers"], lambda _, b: b, query, options, **kwargs - ), - self.last_response_headers, - ) - - return AsyncItemPaged( - self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable - ) - - async def GetDatabaseAccount(self, url_connection=None, **kwargs): - """Gets database account info. - - :return: - The Database Account. 
- :rtype: - documents.DatabaseAccount - - """ - if url_connection is None: - url_connection = self.url_connection - - initial_headers = dict(self.default_headers) - headers = base.GetHeaders(self, initial_headers, "get", "", "", "", {}) # path # id # type - - request_params = _request_object.RequestObject("databaseaccount", documents._OperationType.Read, url_connection) - result, self.last_response_headers = await self.__Get("", request_params, headers, **kwargs) - database_account = documents.DatabaseAccount() - database_account.DatabasesLink = "/dbs/" - database_account.MediaLink = "/media/" - if http_constants.HttpHeaders.MaxMediaStorageUsageInMB in self.last_response_headers: - database_account.MaxMediaStorageUsageInMB = self.last_response_headers[ - http_constants.HttpHeaders.MaxMediaStorageUsageInMB - ] - if http_constants.HttpHeaders.CurrentMediaStorageUsageInMB in self.last_response_headers: - database_account.CurrentMediaStorageUsageInMB = self.last_response_headers[ - http_constants.HttpHeaders.CurrentMediaStorageUsageInMB - ] - database_account.ConsistencyPolicy = result.get(constants._Constants.UserConsistencyPolicy) - - # WritableLocations and ReadableLocations fields will be available only for geo-replicated database accounts - if constants._Constants.WritableLocations in result: - database_account._WritableLocations = result[constants._Constants.WritableLocations] - if constants._Constants.ReadableLocations in result: - database_account._ReadableLocations = result[constants._Constants.ReadableLocations] - if constants._Constants.EnableMultipleWritableLocations in result: - database_account._EnableMultipleWritableLocations = result[ - constants._Constants.EnableMultipleWritableLocations - ] - - self._useMultipleWriteLocations = ( - self.connection_policy.UseMultipleWriteLocations and database_account._EnableMultipleWritableLocations - ) - return database_account - - async def Create(self, body, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin - """Creates a Azure Cosmos resource and returns it. - - :param dict body: - :param str path: - :param str typ: - :param str id: - :param dict initial_headers: - :param dict options: - The request options for the request. - - :return: - The created Azure Cosmos resource. - :rtype: - dict - - """ - if options is None: - options = {} - - initial_headers = initial_headers or self.default_headers - headers = base.GetHeaders(self, initial_headers, "post", path, id, typ, options) - # Create will use WriteEndpoint since it uses POST operation - - request_params = _request_object.RequestObject(typ, documents._OperationType.Create) - result, self.last_response_headers = await self.__Post(path, request_params, body, headers, **kwargs) - - # update session for write request - self._UpdateSessionIfRequired(headers, result, self.last_response_headers) - return result - - def Upsert(self, body, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin - """Upserts a Azure Cosmos resource and returns it. - - :param dict body: - :param str path: - :param str typ: - :param str id: - :param dict initial_headers: - :param dict options: - The request options for the request. - - :return: - The upserted Azure Cosmos resource. 
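Since GetDatabaseAccount above is now a coroutine, callers are expected to await it. A minimal sketch, assuming `connection` is an existing CosmosClientConnection:

    async def show_locations(connection):
        account = await connection.GetDatabaseAccount()
        # WritableLocations/ReadableLocations are populated only for geo-replicated accounts.
        print(account.WritableLocations)
        print(account.ReadableLocations)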
- :rtype: - dict - - """ - if options is None: - options = {} - - initial_headers = initial_headers or self.default_headers - headers = base.GetHeaders(self, initial_headers, "post", path, id, typ, options) - - headers[http_constants.HttpHeaders.IsUpsert] = True - - # Upsert will use WriteEndpoint since it uses POST operation - request_params = _request_object.RequestObject(typ, documents._OperationType.Upsert) - result, self.last_response_headers = self.__Post(path, request_params, body, headers, **kwargs) - # update session for write request - self._UpdateSessionIfRequired(headers, result, self.last_response_headers) - return result - - def Replace(self, resource, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin - """Replaces a Azure Cosmos resource and returns it. - - :param dict resource: - :param str path: - :param str typ: - :param str id: - :param dict initial_headers: - :param dict options: - The request options for the request. - - :return: - The new Azure Cosmos resource. - :rtype: - dict - - """ - if options is None: - options = {} - - initial_headers = initial_headers or self.default_headers - headers = base.GetHeaders(self, initial_headers, "put", path, id, typ, options) - # Replace will use WriteEndpoint since it uses PUT operation - request_params = _request_object.RequestObject(typ, documents._OperationType.Replace) - result, self.last_response_headers = self.__Put(path, request_params, resource, headers, **kwargs) - - # update session for request mutates data on server side - self._UpdateSessionIfRequired(headers, result, self.last_response_headers) - return result - - def Read(self, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin - """Reads a Azure Cosmos resource and returns it. - - :param str path: - :param str typ: - :param str id: - :param dict initial_headers: - :param dict options: - The request options for the request. - - :return: - The upserted Azure Cosmos resource. - :rtype: - dict - - """ - if options is None: - options = {} - - initial_headers = initial_headers or self.default_headers - headers = base.GetHeaders(self, initial_headers, "get", path, id, typ, options) - # Read will use ReadEndpoint since it uses GET operation - request_params = _request_object.RequestObject(typ, documents._OperationType.Read) - result, self.last_response_headers = self.__Get(path, request_params, headers, **kwargs) - return result - - async def ReadAsync(self, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin - """Reads a Azure Cosmos resource and returns it. - - :param str path: - :param str typ: - :param str id: - :param dict initial_headers: - :param dict options: - The request options for the request. - - :return: - The upserted Azure Cosmos resource. - :rtype: - dict - - """ - if options is None: - options = {} - - initial_headers = initial_headers or self.default_headers - headers = base.GetHeaders(self, initial_headers, "get", path, id, typ, options) - # Read will use ReadEndpoint since it uses GET operation - request_params = _request_object.RequestObject(typ, documents._OperationType.Read) - result, self.last_response_headers = await self.__Get(path, request_params, headers, **kwargs) - return result - - def DeleteResource(self, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin - """Deletes a Azure Cosmos resource and returns it. 
- - :param str path: - :param str typ: - :param str id: - :param dict initial_headers: - :param dict options: - The request options for the request. - - :return: - The deleted Azure Cosmos resource. - :rtype: - dict - - """ - if options is None: - options = {} - - initial_headers = initial_headers or self.default_headers - headers = base.GetHeaders(self, initial_headers, "delete", path, id, typ, options) - # Delete will use WriteEndpoint since it uses DELETE operation - request_params = _request_object.RequestObject(typ, documents._OperationType.Delete) - result, self.last_response_headers = self.__Delete(path, request_params, headers, **kwargs) - - # update session for request mutates data on server side - self._UpdateSessionIfRequired(headers, result, self.last_response_headers) - - return result - - async def __Get(self, path, request_params, req_headers, **kwargs): - """Azure Cosmos 'GET' http request. - - :params str url: - :params str path: - :params dict req_headers: - - :return: - Tuple of (result, headers). - :rtype: - tuple of (dict, dict) - - """ - request = self.pipeline_client.get(url=path, headers=req_headers) - return await asynchronous_request.AsynchronousRequest( - client=self, - request_params=request_params, - global_endpoint_manager=self._global_endpoint_manager, - connection_policy=self.connection_policy, - pipeline_client=self.pipeline_client, - request=request, - request_data=None, - **kwargs - ) - - def __GetSync(self, path, request_params, req_headers, **kwargs): - """Azure Cosmos 'GET' http request. - - :params str url: - :params str path: - :params dict req_headers: - - :return: - Tuple of (result, headers). - :rtype: - tuple of (dict, dict) - - """ - request = self.pipeline_client.get(url=path, headers=req_headers) - return synchronized_request.SynchronizedRequest( - client=self, - request_params=request_params, - global_endpoint_manager=self._global_endpoint_manager, - connection_policy=self.connection_policy, - pipeline_client=self.pipeline_client, - request=request, - request_data=None, - **kwargs - ) - - def __Post(self, path, request_params, body, req_headers, **kwargs): - """Azure Cosmos 'POST' http request. - - :params str url: - :params str path: - :params (str, unicode, dict) body: - :params dict req_headers: - - :return: - Tuple of (result, headers). - :rtype: - tuple of (dict, dict) - - """ - request = self.pipeline_client.post(url=path, headers=req_headers) - return synchronized_request.SynchronizedRequest( - client=self, - request_params=request_params, - global_endpoint_manager=self._global_endpoint_manager, - connection_policy=self.connection_policy, - pipeline_client=self.pipeline_client, - request=request, - request_data=body, - **kwargs - ) - - def __Put(self, path, request_params, body, req_headers, **kwargs): - """Azure Cosmos 'PUT' http request. - - :params str url: - :params str path: - :params (str, unicode, dict) body: - :params dict req_headers: - - :return: - Tuple of (result, headers). - :rtype: - tuple of (dict, dict) - - """ - request = self.pipeline_client.put(url=path, headers=req_headers) - return synchronized_request.SynchronizedRequest( - client=self, - request_params=request_params, - global_endpoint_manager=self._global_endpoint_manager, - connection_policy=self.connection_policy, - pipeline_client=self.pipeline_client, - request=request, - request_data=body, - **kwargs - ) - - def __Delete(self, path, request_params, req_headers, **kwargs): - """Azure Cosmos 'DELETE' http request. 
- - :params str url: - :params str path: - :params dict req_headers: - - :return: - Tuple of (result, headers). - :rtype: - tuple of (dict, dict) - - """ - request = self.pipeline_client.delete(url=path, headers=req_headers) - return synchronized_request.SynchronizedRequest( - client=self, - request_params=request_params, - global_endpoint_manager=self._global_endpoint_manager, - connection_policy=self.connection_policy, - pipeline_client=self.pipeline_client, - request=request, - request_data=None, - **kwargs - ) - - def QueryFeed(self, path, collection_id, query, options, partition_key_range_id=None, **kwargs): - """Query Feed for Document Collection resource. - - :param str path: - Path to the document collection. - :param str collection_id: - Id of the document collection. - :param (str or dict) query: - :param dict options: - The request options for the request. - :param str partition_key_range_id: - Partition key range id. - :rtype: - tuple - - """ - return ( - self.__QueryFeed( - path, - "docs", - collection_id, - lambda r: r["Documents"], - lambda _, b: b, - query, - options, - partition_key_range_id, - **kwargs - ), - self.last_response_headers, - ) - - def __QueryFeed( - self, - path, - typ, - id_, - result_fn, - create_fn, - query, - options=None, - partition_key_range_id=None, - response_hook=None, - is_query_plan=False, - **kwargs - ): - """Query for more than one Azure Cosmos resources. - - :param str path: - :param str typ: - :param str id_: - :param function result_fn: - :param function create_fn: - :param (str or dict) query: - :param dict options: - The request options for the request. - :param str partition_key_range_id: - Specifies partition key range id. - :param function response_hook: - :param bool is_query_plan: - Specififes if the call is to fetch query plan - - :rtype: - list - - :raises SystemError: If the query compatibility mode is undefined. - - """ - if options is None: - options = {} - - if query: - __GetBodiesFromQueryResult = result_fn - else: - - def __GetBodiesFromQueryResult(result): - if result is not None: - return [create_fn(self, body) for body in result_fn(result)] - # If there is no change feed, the result data is empty and result is None. - # This case should be interpreted as an empty array. - return [] - - initial_headers = self.default_headers.copy() - # Copy to make sure that default_headers won't be changed. 
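A small, self-contained sketch of the __GetBodiesFromQueryResult helper defined above: when no query is given, each raw body is passed through create_fn, and a None result (for example, an empty change feed) is treated as an empty list. The feed data below is illustrative only.

    def get_bodies(result, result_fn, create_fn):
        if result is not None:
            return [create_fn(None, body) for body in result_fn(result)]
        return []

    feed = {"Documents": [{"id": "1"}, {"id": "2"}]}
    print(get_bodies(feed, lambda r: r["Documents"], lambda _, b: b))  # [{'id': '1'}, {'id': '2'}]
    print(get_bodies(None, lambda r: r["Documents"], lambda _, b: b))  # []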
- if query is None: - # Query operations will use ReadEndpoint even though it uses GET(for feed requests) - request_params = _request_object.RequestObject(typ, - documents._OperationType.QueryPlan if is_query_plan else documents._OperationType.ReadFeed) - headers = base.GetHeaders(self, initial_headers, "get", path, id_, typ, options, partition_key_range_id) - result, self.last_response_headers = self.__Get(path, request_params, headers, **kwargs) - if response_hook: - response_hook(self.last_response_headers, result) - return __GetBodiesFromQueryResult(result) - - query = self.__CheckAndUnifyQueryFormat(query) - - initial_headers[http_constants.HttpHeaders.IsQuery] = "true" - if not is_query_plan: - initial_headers[http_constants.HttpHeaders.IsQuery] = "true" - - if ( - self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.Default - or self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.Query - ): - initial_headers[http_constants.HttpHeaders.ContentType] = runtime_constants.MediaTypes.QueryJson - elif self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.SqlQuery: - initial_headers[http_constants.HttpHeaders.ContentType] = runtime_constants.MediaTypes.SQL - else: - raise SystemError("Unexpected query compatibility mode.") - - # Query operations will use ReadEndpoint even though it uses POST(for regular query operations) - request_params = _request_object.RequestObject(typ, documents._OperationType.SqlQuery) - req_headers = base.GetHeaders(self, initial_headers, "post", path, id_, typ, options, partition_key_range_id) - result, self.last_response_headers = self.__Post(path, request_params, query, req_headers, **kwargs) - - if response_hook: - response_hook(self.last_response_headers, result) - - return __GetBodiesFromQueryResult(result) - - def _GetQueryPlanThroughGateway(self, query, resource_link, **kwargs): - supported_query_features = (documents._QueryFeature.Aggregate + "," + - documents._QueryFeature.CompositeAggregate + "," + - documents._QueryFeature.Distinct + "," + - documents._QueryFeature.MultipleOrderBy + "," + - documents._QueryFeature.OffsetAndLimit + "," + - documents._QueryFeature.OrderBy + "," + - documents._QueryFeature.Top) - - options = { - "contentType": runtime_constants.MediaTypes.Json, - "isQueryPlanRequest": True, - "supportedQueryFeatures": supported_query_features, - "queryVersion": http_constants.Versions.QueryVersion - } - - resource_link = base.TrimBeginningAndEndingSlashes(resource_link) - path = base.GetPathFromLink(resource_link, "docs") - resource_id = base.GetResourceIdOrFullNameFromLink(resource_link) - - return self.__QueryFeed(path, - "docs", - resource_id, - lambda r: r, - None, - query, - options, - is_query_plan=True, - **kwargs) - - def __CheckAndUnifyQueryFormat(self, query_body): - """Checks and unifies the format of the query body. - - :raises TypeError: If query_body is not of expected type (depending on the query compatibility mode). - :raises ValueError: If query_body is a dict but doesn\'t have valid query text. - :raises SystemError: If the query compatibility mode is undefined. - - :param (str or dict) query_body: - - :return: - The formatted query body. 
- :rtype: - dict or string - """ - if ( - self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.Default - or self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.Query - ): - if not isinstance(query_body, dict) and not isinstance(query_body, six.string_types): - raise TypeError("query body must be a dict or string.") - if isinstance(query_body, dict) and not query_body.get("query"): - raise ValueError('query body must have valid query text with key "query".') - if isinstance(query_body, six.string_types): - return {"query": query_body} - elif ( - self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.SqlQuery - and not isinstance(query_body, six.string_types) - ): - raise TypeError("query body must be a string.") - else: - raise SystemError("Unexpected query compatibility mode.") - - return query_body - - @staticmethod - def __ValidateResource(resource): - id_ = resource.get("id") - if id_: - try: - if id_.find("/") != -1 or id_.find("\\") != -1 or id_.find("?") != -1 or id_.find("#") != -1: - raise ValueError("Id contains illegal chars.") - - if id_[-1] == " ": - raise ValueError("Id ends with a space.") - except AttributeError: - raise_with_traceback(TypeError, message="Id type must be a string.") - - # Adds the partition key to options - def _AddPartitionKey(self, collection_link, document, options): - collection_link = base.TrimBeginningAndEndingSlashes(collection_link) - - # TODO: Refresh the cache if partition is extracted automatically and we get a 400.1001 - - # If the document collection link is present in the cache, then use the cached partitionkey definition - if collection_link in self.partition_key_definition_cache: - partitionKeyDefinition = self.partition_key_definition_cache.get(collection_link) - # Else read the collection from backend and add it to the cache - else: - collection = self.ReadContainer(collection_link) - partitionKeyDefinition = collection.get("partitionKey") - self.partition_key_definition_cache[collection_link] = partitionKeyDefinition - - # If the collection doesn't have a partition key definition, skip it as it's a legacy collection - if partitionKeyDefinition: - # If the user has passed in the partitionKey in options use that elase extract it from the document - if "partitionKey" not in options: - partitionKeyValue = self._ExtractPartitionKey(partitionKeyDefinition, document) - options["partitionKey"] = partitionKeyValue - - return options - - # Extracts the partition key from the document using the partitionKey definition - def _ExtractPartitionKey(self, partitionKeyDefinition, document): - - # Parses the paths into a list of token each representing a property - partition_key_parts = base.ParsePaths(partitionKeyDefinition.get("paths")) - # Check if the partitionKey is system generated or not - is_system_key = partitionKeyDefinition["systemKey"] if "systemKey" in partitionKeyDefinition else False - - # Navigates the document to retrieve the partitionKey specified in the paths - return self._retrieve_partition_key(partition_key_parts, document, is_system_key) - - # Navigates the document to retrieve the partitionKey specified in the partition key parts - def _retrieve_partition_key(self, partition_key_parts, document, is_system_key): - expected_matchCount = len(partition_key_parts) - matchCount = 0 - partitionKey = document - - for part in partition_key_parts: - # At any point if we don't find the value of a sub-property in the document, we return as Undefined - if part not 
in partitionKey: - return self._return_undefined_or_empty_partition_key(is_system_key) - - partitionKey = partitionKey.get(part) - matchCount += 1 - # Once we reach the "leaf" value(not a dict), we break from loop - if not isinstance(partitionKey, dict): - break - - # Match the count of hops we did to get the partitionKey with the length of - # partition key parts and validate that it's not a dict at that level - if (matchCount != expected_matchCount) or isinstance(partitionKey, dict): - return self._return_undefined_or_empty_partition_key(is_system_key) - - return partitionKey - - def _UpdateSessionIfRequired(self, request_headers, response_result, response_headers): - """ - Updates session if necessary. - - :param dict response_result: - :param dict response_headers: - :param dict response_headers - - :return: - None, but updates the client session if necessary. - - """ - - # if this request was made with consistency level as session, then update the session - if response_result is None or response_headers is None: - return - - is_session_consistency = False - if http_constants.HttpHeaders.ConsistencyLevel in request_headers: - if documents.ConsistencyLevel.Session == request_headers[http_constants.HttpHeaders.ConsistencyLevel]: - is_session_consistency = True - - if is_session_consistency: - # update session - self.session.update_session(response_result, response_headers) - - @staticmethod - def _return_undefined_or_empty_partition_key(is_system_key): - if is_system_key: - return _Empty - return _Undefined + async def _setup(self): + if not 'database_account' in self._setup_kwargs: + self._setup_kwargs['database_account'] = await self._global_endpoint_manager._GetDatabaseAccount(**self._setup_kwargs) + await self._global_endpoint_manager.force_refresh(self._setup_kwargs['database_account']) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py index 2248619fb335..089daf5a225d 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py @@ -23,7 +23,7 @@ database service. 
""" -import threading +import asyncio from six.moves.urllib.parse import urlparse @@ -54,7 +54,7 @@ def __init__(self, client): self.refresh_time_interval_in_ms, ) self.refresh_needed = False - self.refresh_lock = threading.RLock() + self.refresh_lock = asyncio.RLock() self.last_refresh_time = 0 def get_refresh_time_interval_in_ms_stub(self): # pylint: disable=no-self-use @@ -84,21 +84,21 @@ def get_ordered_read_endpoints(self): def can_use_multiple_write_locations(self, request): return self.location_cache.can_use_multiple_write_locations_for_request(request) - def force_refresh(self, database_account): + async def force_refresh(self, database_account): self.refresh_needed = True - self.refresh_endpoint_list(database_account) + await self.refresh_endpoint_list(database_account) - def refresh_endpoint_list(self, database_account, **kwargs): - with self.refresh_lock: + async def refresh_endpoint_list(self, database_account, **kwargs): + async with self.refresh_lock: # if refresh is not needed or refresh is already taking place, return if not self.refresh_needed: return try: - self._refresh_endpoint_list_private(database_account, **kwargs) + await self._refresh_endpoint_list_private(database_account, **kwargs) except Exception as e: raise e - def _refresh_endpoint_list_private(self, database_account=None, **kwargs): + async def _refresh_endpoint_list_private(self, database_account=None, **kwargs): if database_account: self.location_cache.perform_on_database_account_read(database_account) self.refresh_needed = False @@ -108,12 +108,12 @@ def _refresh_endpoint_list_private(self, database_account=None, **kwargs): and self.location_cache.current_time_millis() - self.last_refresh_time > self.refresh_time_interval_in_ms ): if not database_account: - database_account = self._GetDatabaseAccount(**kwargs) + database_account = await self._GetDatabaseAccount(**kwargs) self.location_cache.perform_on_database_account_read(database_account) self.last_refresh_time = self.location_cache.current_time_millis() self.refresh_needed = False - def _GetDatabaseAccount(self, **kwargs): + async def _GetDatabaseAccount(self, **kwargs): """Gets the database account. First tries by using the default endpoint, and if that doesn't work, @@ -121,7 +121,7 @@ def _GetDatabaseAccount(self, **kwargs): specified, to get the database account. 
""" try: - database_account = self._GetDatabaseAccountStub(self.DefaultEndpoint, **kwargs) + database_account = await self._GetDatabaseAccountStub(self.DefaultEndpoint, **kwargs) return database_account # If for any reason(non-globaldb related), we are not able to get the database # account from the above call to GetDatabaseAccount, we would try to get this @@ -133,7 +133,7 @@ def _GetDatabaseAccount(self, **kwargs): for location_name in self.PreferredLocations: locational_endpoint = _GlobalEndpointManager.GetLocationalEndpoint(self.DefaultEndpoint, location_name) try: - database_account = self._GetDatabaseAccountStub(locational_endpoint, **kwargs) + database_account = await self._GetDatabaseAccountStub(locational_endpoint, **kwargs) return database_account except exceptions.CosmosHttpResponseError: pass diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility.py new file mode 100644 index 000000000000..39e2bd1264e3 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility.py @@ -0,0 +1,196 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal methods for executing functions in the Azure Cosmos database service. +""" + +import time +import asyncio + +from azure.core.exceptions import AzureError, ClientAuthenticationError +from azure.core.pipeline.policies import AsyncRetryPolicy + +from .. import exceptions +from ..http_constants import HttpHeaders, StatusCodes, SubStatusCodes +from .._retry_utility import _configure_timeout +from .. import _endpoint_discovery_retry_policy +from .. import _resource_throttle_retry_policy +from .. import _default_retry_policy +from .. 
import _session_retry_policy + + +# pylint: disable=protected-access + + +async def ExecuteAsync(client, global_endpoint_manager, function, *args, **kwargs): + """Executes the function with passed parameters applying all retry policies + + :param object client: + Document client instance + :param object global_endpoint_manager: + Instance of _GlobalEndpointManager class + :param function function: + Function to be called wrapped with retries + :param (non-keyworded, variable number of arguments list) *args: + :param (keyworded, variable number of arguments list) **kwargs: + + """ + # instantiate all retry policies here to be applied for each request execution + endpointDiscovery_retry_policy = _endpoint_discovery_retry_policy.EndpointDiscoveryRetryPolicy( + client.connection_policy, global_endpoint_manager, *args + ) + + resourceThrottle_retry_policy = _resource_throttle_retry_policy.ResourceThrottleRetryPolicy( + client.connection_policy.RetryOptions.MaxRetryAttemptCount, + client.connection_policy.RetryOptions.FixedRetryIntervalInMilliseconds, + client.connection_policy.RetryOptions.MaxWaitTimeInSeconds, + ) + defaultRetry_policy = _default_retry_policy.DefaultRetryPolicy(*args) + + sessionRetry_policy = _session_retry_policy._SessionRetryPolicy( + client.connection_policy.EnableEndpointDiscovery, global_endpoint_manager, *args + ) + while True: + try: + client_timeout = kwargs.get('timeout') + start_time = time.time() + if args: + result = await ExecuteFunctionAsync(function, global_endpoint_manager, *args, **kwargs) + else: + result = await ExecuteFunctionAsync(function, *args, **kwargs) + if not client.last_response_headers: + client.last_response_headers = {} + + # setting the throttle related response headers before returning the result + client.last_response_headers[ + HttpHeaders.ThrottleRetryCount + ] = resourceThrottle_retry_policy.current_retry_attempt_count + client.last_response_headers[ + HttpHeaders.ThrottleRetryWaitTimeInMs + ] = resourceThrottle_retry_policy.cummulative_wait_time_in_milliseconds + + return result + except exceptions.CosmosHttpResponseError as e: + retry_policy = None + if e.status_code == StatusCodes.FORBIDDEN and e.sub_status == SubStatusCodes.WRITE_FORBIDDEN: + retry_policy = endpointDiscovery_retry_policy + elif e.status_code == StatusCodes.TOO_MANY_REQUESTS: + retry_policy = resourceThrottle_retry_policy + elif ( + e.status_code == StatusCodes.NOT_FOUND + and e.sub_status + and e.sub_status == SubStatusCodes.READ_SESSION_NOTAVAILABLE + ): + retry_policy = sessionRetry_policy + else: + retry_policy = defaultRetry_policy + + # If none of the retry policies applies or there is no retry needed, set the + # throttle related response hedaers and re-throw the exception back arg[0] + # is the request. 
It needs to be modified for write forbidden exception + if not retry_policy.ShouldRetry(e): + if not client.last_response_headers: + client.last_response_headers = {} + client.last_response_headers[ + HttpHeaders.ThrottleRetryCount + ] = resourceThrottle_retry_policy.current_retry_attempt_count + client.last_response_headers[ + HttpHeaders.ThrottleRetryWaitTimeInMs + ] = resourceThrottle_retry_policy.cummulative_wait_time_in_milliseconds + if args and args[0].should_clear_session_token_on_session_read_failure: + client.session.clear_session_token(client.last_response_headers) + raise + + # Wait for retry_after_in_milliseconds time before the next retry + await asyncio.sleep(retry_policy.retry_after_in_milliseconds / 1000.0) + if client_timeout: + kwargs['timeout'] = client_timeout - (time.time() - start_time) + if kwargs['timeout'] <= 0: + raise exceptions.CosmosClientTimeoutError() + + +async def ExecuteFunctionAsync(function, *args, **kwargs): + """Stub method so that it can be used for mocking purposes as well. + """ + return await function(*args, **kwargs) + + +class ConnectionRetryPolicy(AsyncRetryPolicy): + + def __init__(self, **kwargs): + clean_kwargs = {k: v for k, v in kwargs.items() if v is not None} + super(ConnectionRetryPolicy, self).__init__(**clean_kwargs) + + async def send(self, request): + """Sends the PipelineRequest object to the next policy. Uses retry settings if necessary. + Also enforces an absolute client-side timeout that spans multiple retry attempts. + + :param request: The PipelineRequest object + :type request: ~azure.core.pipeline.PipelineRequest + :return: Returns the PipelineResponse or raises error if maximum retries exceeded. + :rtype: ~azure.core.pipeline.PipelineResponse + :raises ~azure.core.exceptions.AzureError: Maximum retries exceeded. + :raises ~azure.cosmos.exceptions.CosmosClientTimeoutError: Specified timeout exceeded. + :raises ~azure.core.exceptions.ClientAuthenticationError: Authentication failed. 
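A hedged sketch of the absolute client timeout this policy enforces: a caller-supplied `timeout` (in seconds) spans every retry attempt, and CosmosClientTimeoutError is raised once it is exhausted. The container and item names are placeholders.

    from azure.cosmos import exceptions

    async def read_with_deadline(container):
        try:
            return await container.read_item(item="item1", partition_key="item1", timeout=5)
        except exceptions.CosmosClientTimeoutError:
            return None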
+ """ + absolute_timeout = request.context.options.pop('timeout', None) + per_request_timeout = request.context.options.pop('connection_timeout', 0) + + retry_error = None + retry_active = True + response = None + retry_settings = self.configure_retries(request.context.options) + while retry_active: + try: + start_time = time.time() + _configure_timeout(request, absolute_timeout, per_request_timeout) + + response = await self.next.send(request) + if self.is_retry(retry_settings, response): + retry_active = self.increment(retry_settings, response=response) + if retry_active: + await self.sleep(retry_settings, request.context.transport, response=response) + continue + break + except ClientAuthenticationError: # pylint:disable=try-except-raise + # the authentication policy failed such that the client's request can't + # succeed--we'll never have a response to it, so propagate the exception + raise + except exceptions.CosmosClientTimeoutError as timeout_error: + timeout_error.inner_exception = retry_error + timeout_error.response = response + timeout_error.history = retry_settings['history'] + raise + except AzureError as err: + retry_error = err + if self._is_method_retryable(retry_settings, request.http_request): + retry_active = self.increment(retry_settings, response=request, error=err) + if retry_active: + await self.sleep(retry_settings, request.context.transport) + continue + raise err + finally: + end_time = time.time() + if absolute_timeout: + absolute_timeout -= (end_time - start_time) + + self.update_context(response.context, retry_settings) + return response diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container_async.py deleted file mode 100644 index 6120d533c918..000000000000 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container_async.py +++ /dev/null @@ -1,802 +0,0 @@ -# The MIT License (MIT) -# Copyright (c) 2014 Microsoft Corporation - -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: - -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. - -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -"""Create, read, update and delete items in the Azure Cosmos DB SQL API service. 
-""" - -from typing import Any, Dict, List, Optional, Union, Iterable, cast # pylint: disable=unused-import - -import six -import asyncio -import time -from azure.core.tracing.decorator import distributed_trace # type: ignore - -from ._cosmos_client_connection_async import CosmosClientConnection -from .._base import build_options -from ..exceptions import CosmosResourceNotFoundError -from ..http_constants import StatusCodes -from ..offer import Offer -from ..scripts import ScriptsProxy -from ..partition_key import NonePartitionKeyValue - -__all__ = ("ContainerProxy",) - -# pylint: disable=protected-access -# pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs - - -class ContainerProxy(object): - """An interface to interact with a specific DB Container. - - This class should not be instantiated directly. Instead, use the - :func:`DatabaseProxy.get_container_client` method to get an existing - container, or the :func:`Database.create_container` method to create a - new container. - - A container in an Azure Cosmos DB SQL API database is a collection of - documents, each of which is represented as an Item. - - :ivar str id: ID (name) of the container - :ivar str session_token: The session token for the container. - """ - - def __init__(self, client_connection, database_link, id, properties=None): # pylint: disable=redefined-builtin - # type: (CosmosClientConnection, str, str, Dict[str, Any]) -> None - self.client_connection = client_connection - self.id = id - self._properties = properties - self.container_link = u"{}/colls/{}".format(database_link, self.id) - self._is_system_key = None - self._scripts = None # type: Optional[ScriptsProxy] - - def __repr__(self): - # type () -> str - return "".format(self.container_link)[:1024] - - def _get_properties(self): - # type: () -> Dict[str, Any] - if self._properties is None: - self._properties = self.read() - return self._properties - - @property - def is_system_key(self): - # type: () -> bool - if self._is_system_key is None: - properties = self._get_properties() - self._is_system_key = ( - properties["partitionKey"]["systemKey"] if "systemKey" in properties["partitionKey"] else False - ) - return cast('bool', self._is_system_key) - - @property - def scripts(self): - # type: () -> ScriptsProxy - if self._scripts is None: - self._scripts = ScriptsProxy(self.client_connection, self.container_link, self.is_system_key) - return cast('ScriptsProxy', self._scripts) - - def _get_document_link(self, item_or_link): - # type: (Union[Dict[str, Any], str]) -> str - if isinstance(item_or_link, six.string_types): - return u"{}/docs/{}".format(self.container_link, item_or_link) - return item_or_link["_self"] - - def _get_conflict_link(self, conflict_or_link): - # type: (Union[Dict[str, Any], str]) -> str - if isinstance(conflict_or_link, six.string_types): - return u"{}/conflicts/{}".format(self.container_link, conflict_or_link) - return conflict_or_link["_self"] - - def _set_partition_key(self, partition_key): - if partition_key == NonePartitionKeyValue: - return CosmosClientConnection._return_undefined_or_empty_partition_key(self.is_system_key) - return partition_key - - @distributed_trace - def read( - self, - populate_query_metrics=None, # type: Optional[bool] - populate_partition_key_range_statistics=None, # type: Optional[bool] - populate_quota_info=None, # type: Optional[bool] - **kwargs # type: Any - ): - # type: (...) -> Dict[str, Any] - """Read the container properties. 
- - :param populate_query_metrics: Enable returning query metrics in response headers. - :param populate_partition_key_range_statistics: Enable returning partition key - range statistics in response headers. - :param populate_quota_info: Enable returning collection storage quota information in response headers. - :keyword str session_token: Token for use with Session consistency. - :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. - :keyword Callable response_hook: A callable invoked with the response metadata. - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: Raised if the container couldn't be retrieved. - This includes if the container does not exist. - :returns: Dict representing the retrieved container. - :rtype: dict[str, Any] - """ - request_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - if populate_query_metrics is not None: - request_options["populateQueryMetrics"] = populate_query_metrics - if populate_partition_key_range_statistics is not None: - request_options["populatePartitionKeyRangeStatistics"] = populate_partition_key_range_statistics - if populate_quota_info is not None: - request_options["populateQuotaInfo"] = populate_quota_info - - collection_link = self.container_link - self._properties = self.client_connection.ReadContainer( - collection_link, options=request_options, **kwargs - ) - - if response_hook: - response_hook(self.client_connection.last_response_headers, self._properties) - - return cast('Dict[str, Any]', self._properties) - - @distributed_trace - async def read_item( - self, - item, # type: Union[str, Dict[str, Any]] - partition_key, # type: Any - populate_query_metrics=None, # type: Optional[bool] - post_trigger_include=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Dict[str, str] - """Get the item identified by `item`. - - :param item: The ID (name) or dict representing item to retrieve. - :param partition_key: Partition key for the item to retrieve. - :param populate_query_metrics: Enable returning query metrics in response headers. - :param post_trigger_include: trigger id to be used as post operation trigger. - :keyword str session_token: Token for use with Session consistency. - :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. - :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: Dict representing the item to be retrieved. - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The given item couldn't be retrieved. - :rtype: dict[str, Any] - - .. admonition:: Example: - - .. 
literalinclude:: ../samples/examples.py - :start-after: [START update_item] - :end-before: [END update_item] - :language: python - :dedent: 0 - :caption: Get an item from the database and update one of its properties: - :name: update_item - """ - doc_link = self._get_document_link(item) - request_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - - if partition_key is not None: - request_options["partitionKey"] = self._set_partition_key(partition_key) - if populate_query_metrics is not None: - request_options["populateQueryMetrics"] = populate_query_metrics - if post_trigger_include is not None: - request_options["postTriggerInclude"] = post_trigger_include - - result = await self.client_connection.ReadItem(document_link=doc_link, options=request_options, **kwargs) - if response_hook: - response_hook(self.client_connection.last_response_headers, result) - return result - - @distributed_trace - def read_all_items( - self, - max_item_count=None, # type: Optional[int] - populate_query_metrics=None, # type: Optional[bool] - **kwargs # type: Any - ): - # type: (...) -> Iterable[Dict[str, Any]] - """List all the items in the container. - - :param max_item_count: Max number of items to be returned in the enumeration operation. - :param populate_query_metrics: Enable returning query metrics in response headers. - :keyword str session_token: Token for use with Session consistency. - :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. - :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: An Iterable of items (dicts). - :rtype: Iterable[dict[str, Any]] - """ - feed_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - if max_item_count is not None: - feed_options["maxItemCount"] = max_item_count - if populate_query_metrics is not None: - feed_options["populateQueryMetrics"] = populate_query_metrics - - if hasattr(response_hook, "clear"): - response_hook.clear() - - items = self.client_connection.ReadItems( - collection_link=self.container_link, feed_options=feed_options, response_hook=response_hook, **kwargs - ) - if response_hook: - response_hook(self.client_connection.last_response_headers, items) - return items - - @distributed_trace - def query_items_change_feed( - self, - partition_key_range_id=None, # type: Optional[str] - is_start_from_beginning=False, # type: bool - continuation=None, # type: Optional[str] - max_item_count=None, # type: Optional[int] - **kwargs # type: Any - ): - # type: (...) -> Iterable[Dict[str, Any]] - """Get a sorted list of items that were changed, in the order in which they were modified. - - :param partition_key_range_id: ChangeFeed requests can be executed against specific partition key ranges. - This is used to process the change feed in parallel across multiple consumers. - :param partition_key: partition key at which ChangeFeed requests are targetted. - :param is_start_from_beginning: Get whether change feed should start from - beginning (true) or from current (false). By default it's start from current (false). - :param continuation: e_tag value to be used as continuation for reading change feed. - :param max_item_count: Max number of items to be returned in the enumeration operation. - :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: An Iterable of items (dicts). 
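A minimal sketch of consuming the paged result returned by read_all_items above, assuming the async container proxy exposes it as an async-iterable (AsyncItemPaged); `container` is a placeholder for an existing ContainerProxy.

    async def list_ids(container):
        ids = []
        async for item in container.read_all_items(max_item_count=100):
            ids.append(item["id"])
        return ids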
- :rtype: Iterable[dict[str, Any]] - """ - feed_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - if partition_key_range_id is not None: - feed_options["partitionKeyRangeId"] = partition_key_range_id - partition_key = kwargs.pop("partitionKey", None) - if partition_key is not None: - feed_options["partitionKey"] = partition_key - if is_start_from_beginning is not None: - feed_options["isStartFromBeginning"] = is_start_from_beginning - if max_item_count is not None: - feed_options["maxItemCount"] = max_item_count - if continuation is not None: - feed_options["continuation"] = continuation - - if hasattr(response_hook, "clear"): - response_hook.clear() - - result = self.client_connection.QueryItemsChangeFeed( - self.container_link, options=feed_options, response_hook=response_hook, **kwargs - ) - if response_hook: - response_hook(self.client_connection.last_response_headers, result) - return result - - @distributed_trace - def query_items( - self, - query, # type: str - parameters=None, # type: Optional[List[Dict[str, object]]] - partition_key=None, # type: Optional[Any] - enable_cross_partition_query=None, # type: Optional[bool] - max_item_count=None, # type: Optional[int] - enable_scan_in_query=None, # type: Optional[bool] - populate_query_metrics=None, # type: Optional[bool] - **kwargs # type: Any - ): - # type: (...) -> Iterable[Dict[str, Any]] - """Return all results matching the given `query`. - - You can use any value for the container name in the FROM clause, but - often the container name is used. In the examples below, the container - name is "products," and is aliased as "p" for easier referencing in - the WHERE clause. - - :param query: The Azure Cosmos DB SQL query to execute. - :param parameters: Optional array of parameters to the query. - Each parameter is a dict() with 'name' and 'value' keys. - Ignored if no query is provided. - :param partition_key: Specifies the partition key value for the item. - :param enable_cross_partition_query: Allows sending of more than one request to - execute the query in the Azure Cosmos DB service. - More than one request is necessary if the query is not scoped to single partition key value. - :param max_item_count: Max number of items to be returned in the enumeration operation. - :param enable_scan_in_query: Allow scan on the queries which couldn't be served as - indexing was opted out on the requested paths. - :param populate_query_metrics: Enable returning query metrics in response headers. - :keyword str session_token: Token for use with Session consistency. - :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. - :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: An Iterable of items (dicts). - :rtype: Iterable[dict[str, Any]] - - .. admonition:: Example: - - .. literalinclude:: ../samples/examples.py - :start-after: [START query_items] - :end-before: [END query_items] - :language: python - :dedent: 0 - :caption: Get all products that have not been discontinued: - :name: query_items - - .. 
literalinclude:: ../samples/examples.py - :start-after: [START query_items_param] - :end-before: [END query_items_param] - :language: python - :dedent: 0 - :caption: Parameterized query to get all products that have been discontinued: - :name: query_items_param - """ - feed_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - if enable_cross_partition_query is not None: - feed_options["enableCrossPartitionQuery"] = enable_cross_partition_query - if max_item_count is not None: - feed_options["maxItemCount"] = max_item_count - if populate_query_metrics is not None: - feed_options["populateQueryMetrics"] = populate_query_metrics - if partition_key is not None: - feed_options["partitionKey"] = self._set_partition_key(partition_key) - if enable_scan_in_query is not None: - feed_options["enableScanInQuery"] = enable_scan_in_query - - if hasattr(response_hook, "clear"): - response_hook.clear() - - items = self.client_connection.QueryItems( - database_or_container_link=self.container_link, - query=query if parameters is None else dict(query=query, parameters=parameters), - options=feed_options, - partition_key=partition_key, - response_hook=response_hook, - **kwargs - ) - if response_hook: - response_hook(self.client_connection.last_response_headers, items) - return items - - @distributed_trace - def replace_item( - self, - item, # type: Union[str, Dict[str, Any]] - body, # type: Dict[str, Any] - populate_query_metrics=None, # type: Optional[bool] - pre_trigger_include=None, # type: Optional[str] - post_trigger_include=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Dict[str, str] - """Replaces the specified item if it exists in the container. - - If the item does not already exist in the container, an exception is raised. - - :param item: The ID (name) or dict representing item to be replaced. - :param body: A dict-like object representing the item to replace. - :param populate_query_metrics: Enable returning query metrics in response headers. - :param pre_trigger_include: trigger id to be used as pre operation trigger. - :param post_trigger_include: trigger id to be used as post operation trigger. - :keyword str session_token: Token for use with Session consistency. - :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. - :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource - has changed, and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. - :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: A dict representing the item after replace went through. - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The replace failed or the item with - given id does not exist. 
- :rtype: dict[str, Any] - """ - item_link = self._get_document_link(item) - request_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - request_options["disableIdGeneration"] = True - if populate_query_metrics is not None: - request_options["populateQueryMetrics"] = populate_query_metrics - if pre_trigger_include is not None: - request_options["preTriggerInclude"] = pre_trigger_include - if post_trigger_include is not None: - request_options["postTriggerInclude"] = post_trigger_include - - result = self.client_connection.ReplaceItem( - document_link=item_link, new_document=body, options=request_options, **kwargs - ) - if response_hook: - response_hook(self.client_connection.last_response_headers, result) - return result - - @distributed_trace - def upsert_item( - self, - body, # type: Dict[str, Any] - populate_query_metrics=None, # type: Optional[bool] - pre_trigger_include=None, # type: Optional[str] - post_trigger_include=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Dict[str, str] - """Insert or update the specified item. - - If the item already exists in the container, it is replaced. If the item - does not already exist, it is inserted. - - :param body: A dict-like object representing the item to update or insert. - :param populate_query_metrics: Enable returning query metrics in response headers. - :param pre_trigger_include: trigger id to be used as pre operation trigger. - :param post_trigger_include: trigger id to be used as post operation trigger. - :keyword str session_token: Token for use with Session consistency. - :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. - :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource - has changed, and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. - :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: A dict representing the upserted item. - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The given item could not be upserted. - :rtype: dict[str, Any] - """ - request_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - request_options["disableIdGeneration"] = True - if populate_query_metrics is not None: - request_options["populateQueryMetrics"] = populate_query_metrics - if pre_trigger_include is not None: - request_options["preTriggerInclude"] = pre_trigger_include - if post_trigger_include is not None: - request_options["postTriggerInclude"] = post_trigger_include - - result = self.client_connection.UpsertItem( - database_or_container_link=self.container_link, - document=body, - options=request_options, - **kwargs - ) - if response_hook: - response_hook(self.client_connection.last_response_headers, result) - return result - - @distributed_trace - def create_item( - self, - body, # type: Dict[str, Any] - populate_query_metrics=None, # type: Optional[bool] - pre_trigger_include=None, # type: Optional[str] - post_trigger_include=None, # type: Optional[str] - indexing_directive=None, # type: Optional[Any] - **kwargs # type: Any - ): - # type: (...) -> Dict[str, str] - """Create an item in the container. - - To update or replace an existing item, use the - :func:`ContainerProxy.upsert_item` method. - - :param body: A dict-like object representing the item to create. 
- :param populate_query_metrics: Enable returning query metrics in response headers. - :param pre_trigger_include: trigger id to be used as pre operation trigger. - :param post_trigger_include: trigger id to be used as post operation trigger. - :param indexing_directive: Indicate whether the document should be omitted from indexing. - :keyword bool enable_automatic_id_generation: Enable automatic id generation if no id present. - :keyword str session_token: Token for use with Session consistency. - :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. - :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource - has changed, and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. - :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: A dict representing the new item. - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: Item with the given ID already exists. - :rtype: dict[str, Any] - """ - start = time.time() - request_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - - request_options["disableAutomaticIdGeneration"] = not kwargs.pop('enable_automatic_id_generation', False) - if populate_query_metrics: - request_options["populateQueryMetrics"] = populate_query_metrics - if pre_trigger_include is not None: - request_options["preTriggerInclude"] = pre_trigger_include - if post_trigger_include is not None: - request_options["postTriggerInclude"] = post_trigger_include - if indexing_directive is not None: - request_options["indexingDirective"] = indexing_directive - - result = self.client_connection.CreateItem( - database_or_container_link=self.container_link, document=body, options=request_options, **kwargs - ) - if response_hook: - response_hook(self.client_connection.last_response_headers, result) - print(f"Create item took {(time.time() - start) * 1000} ms") - print("ASYNC CONTAINER USED") - return result - - @distributed_trace - async def create_item_aio( - self, - body, # type: Dict[str, Any] - populate_query_metrics=None, # type: Optional[bool] - pre_trigger_include=None, # type: Optional[str] - post_trigger_include=None, # type: Optional[str] - indexing_directive=None, # type: Optional[Any] - **kwargs # type: Any - ): - # type: (...) -> Dict[str, str] - """Create an item in the container. - - To update or replace an existing item, use the - :func:`ContainerProxy.upsert_item` method. - - :param body: A dict-like object representing the item to create. - :param populate_query_metrics: Enable returning query metrics in response headers. - :param pre_trigger_include: trigger id to be used as pre operation trigger. - :param post_trigger_include: trigger id to be used as post operation trigger. - :param indexing_directive: Indicate whether the document should be omitted from indexing. - :keyword bool enable_automatic_id_generation: Enable automatic id generation if no id present. - :keyword str session_token: Token for use with Session consistency. - :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. - :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource - has changed, and act according to the condition specified by the `match_condition` parameter. 
- :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. - :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: A dict representing the new item. - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: Item with the given ID already exists. - :rtype: dict[str, Any] - """ - start = time.time() - request_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - - request_options["disableAutomaticIdGeneration"] = not kwargs.pop('enable_automatic_id_generation', False) - if populate_query_metrics: - request_options["populateQueryMetrics"] = populate_query_metrics - if pre_trigger_include is not None: - request_options["preTriggerInclude"] = pre_trigger_include - if post_trigger_include is not None: - request_options["postTriggerInclude"] = post_trigger_include - if indexing_directive is not None: - request_options["indexingDirective"] = indexing_directive - - result = await self.client_connection.CreateItemAIO( - database_or_container_link=self.container_link, document=body, options=request_options, **kwargs - ) - if response_hook: - response_hook(self.client_connection.last_response_headers, result) #what is this doing? can't find function - print(f"Create item took {(time.time() - start) * 1000} ms") - return result - - @distributed_trace - def delete_item( - self, - item, # type: Union[Dict[str, Any], str] - partition_key, # type: Any - populate_query_metrics=None, # type: Optional[bool] - pre_trigger_include=None, # type: Optional[str] - post_trigger_include=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> None - """Delete the specified item from the container. - - If the item does not already exist in the container, an exception is raised. - - :param item: The ID (name) or dict representing item to be deleted. - :param partition_key: Specifies the partition key value for the item. - :param populate_query_metrics: Enable returning query metrics in response headers. - :param pre_trigger_include: trigger id to be used as pre operation trigger. - :param post_trigger_include: trigger id to be used as post operation trigger. - :keyword str session_token: Token for use with Session consistency. - :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. - :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource - has changed, and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. - :keyword Callable response_hook: A callable invoked with the response metadata. - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The item wasn't deleted successfully. - :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The item does not exist in the container. 
- :rtype: None - """ - request_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - if partition_key is not None: - request_options["partitionKey"] = self._set_partition_key(partition_key) - if populate_query_metrics is not None: - request_options["populateQueryMetrics"] = populate_query_metrics - if pre_trigger_include is not None: - request_options["preTriggerInclude"] = pre_trigger_include - if post_trigger_include is not None: - request_options["postTriggerInclude"] = post_trigger_include - - document_link = self._get_document_link(item) - result = self.client_connection.DeleteItem(document_link=document_link, options=request_options, **kwargs) - if response_hook: - response_hook(self.client_connection.last_response_headers, result) - - @distributed_trace - def read_offer(self, **kwargs): - # type: (Any) -> Offer - """Read the Offer object for this container. - - If no Offer already exists for the container, an exception is raised. - - :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: Offer for the container. - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: No offer exists for the container or - the offer could not be retrieved. - :rtype: ~azure.cosmos.Offer - """ - response_hook = kwargs.pop('response_hook', None) - properties = self._get_properties() - link = properties["_self"] - query_spec = { - "query": "SELECT * FROM root r WHERE r.resource=@link", - "parameters": [{"name": "@link", "value": link}], - } - offers = list(self.client_connection.QueryOffers(query_spec, **kwargs)) - if not offers: - raise CosmosResourceNotFoundError( - status_code=StatusCodes.NOT_FOUND, - message="Could not find Offer for container " + self.container_link) - - if response_hook: - response_hook(self.client_connection.last_response_headers, offers) - - return Offer(offer_throughput=offers[0]["content"]["offerThroughput"], properties=offers[0]) - - @distributed_trace - def replace_throughput(self, throughput, **kwargs): - # type: (int, Any) -> Offer - """Replace the container's throughput. - - If no Offer already exists for the container, an exception is raised. - - :param throughput: The throughput to be set (an integer). - :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: Offer for the container, updated with new throughput. - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: No offer exists for the container - or the offer could not be updated. 
- :rtype: ~azure.cosmos.Offer - """ - response_hook = kwargs.pop('response_hook', None) - properties = self._get_properties() - link = properties["_self"] - query_spec = { - "query": "SELECT * FROM root r WHERE r.resource=@link", - "parameters": [{"name": "@link", "value": link}], - } - offers = list(self.client_connection.QueryOffers(query_spec, **kwargs)) - if not offers: - raise CosmosResourceNotFoundError( - status_code=StatusCodes.NOT_FOUND, - message="Could not find Offer for container " + self.container_link) - new_offer = offers[0].copy() - new_offer["content"]["offerThroughput"] = throughput - data = self.client_connection.ReplaceOffer(offer_link=offers[0]["_self"], offer=offers[0], **kwargs) - - if response_hook: - response_hook(self.client_connection.last_response_headers, data) - - return Offer(offer_throughput=data["content"]["offerThroughput"], properties=data) - - @distributed_trace - def list_conflicts(self, max_item_count=None, **kwargs): - # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] - """List all the conflicts in the container. - - :param max_item_count: Max number of items to be returned in the enumeration operation. - :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: An Iterable of conflicts (dicts). - :rtype: Iterable[dict[str, Any]] - """ - feed_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - if max_item_count is not None: - feed_options["maxItemCount"] = max_item_count - - result = self.client_connection.ReadConflicts( - collection_link=self.container_link, feed_options=feed_options, **kwargs - ) - if response_hook: - response_hook(self.client_connection.last_response_headers, result) - return result - - @distributed_trace - def query_conflicts( - self, - query, # type: str - parameters=None, # type: Optional[List[str]] - enable_cross_partition_query=None, # type: Optional[bool] - partition_key=None, # type: Optional[Any] - max_item_count=None, # type: Optional[int] - **kwargs # type: Any - ): - # type: (...) -> Iterable[Dict[str, Any]] - """Return all conflicts matching a given `query`. - - :param query: The Azure Cosmos DB SQL query to execute. - :param parameters: Optional array of parameters to the query. Ignored if no query is provided. - :param enable_cross_partition_query: Allows sending of more than one request to execute - the query in the Azure Cosmos DB service. - More than one request is necessary if the query is not scoped to single partition key value. - :param partition_key: Specifies the partition key value for the item. - :param max_item_count: Max number of items to be returned in the enumeration operation. - :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: An Iterable of conflicts (dicts). 
- :rtype: Iterable[dict[str, Any]] - """ - feed_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - if max_item_count is not None: - feed_options["maxItemCount"] = max_item_count - if enable_cross_partition_query is not None: - feed_options["enableCrossPartitionQuery"] = enable_cross_partition_query - if partition_key is not None: - feed_options["partitionKey"] = self._set_partition_key(partition_key) - - result = self.client_connection.QueryConflicts( - collection_link=self.container_link, - query=query if parameters is None else dict(query=query, parameters=parameters), - options=feed_options, - **kwargs - ) - if response_hook: - response_hook(self.client_connection.last_response_headers, result) - return result - - @distributed_trace - def get_conflict(self, conflict, partition_key, **kwargs): - # type: (Union[str, Dict[str, Any]], Any, Any) -> Dict[str, str] - """Get the conflict identified by `conflict`. - - :param conflict: The ID (name) or dict representing the conflict to retrieve. - :param partition_key: Partition key for the conflict to retrieve. - :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: A dict representing the retrieved conflict. - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The given conflict couldn't be retrieved. - :rtype: dict[str, Any] - """ - request_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - if partition_key is not None: - request_options["partitionKey"] = self._set_partition_key(partition_key) - - result = self.client_connection.ReadConflict( - conflict_link=self._get_conflict_link(conflict), options=request_options, **kwargs - ) - if response_hook: - response_hook(self.client_connection.last_response_headers, result) - return result - - @distributed_trace - def delete_conflict(self, conflict, partition_key, **kwargs): - # type: (Union[str, Dict[str, Any]], Any, Any) -> None - """Delete a specified conflict from the container. - - If the conflict does not already exist in the container, an exception is raised. - - :param conflict: The ID (name) or dict representing the conflict to be deleted. - :param partition_key: Partition key for the conflict to delete. - :keyword Callable response_hook: A callable invoked with the response metadata. - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The conflict wasn't deleted successfully. - :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The conflict does not exist in the container. 
- :rtype: None - """ - request_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - if partition_key is not None: - request_options["partitionKey"] = self._set_partition_key(partition_key) - - result = self.client_connection.DeleteConflict( - conflict_link=self._get_conflict_link(conflict), options=request_options, **kwargs - ) - if response_hook: - response_hook(self.client_connection.last_response_headers, result) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py new file mode 100644 index 000000000000..26166d14bf6c --- /dev/null +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py @@ -0,0 +1,163 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Create, read, and delete databases in the Azure Cosmos DB SQL API service. 
+""" + +from typing import Any, Dict, Optional, Union, cast, Iterable, List # pylint: disable=unused-import + +import six +from azure.core.tracing.decorator import distributed_trace # type: ignore + +from ..cosmos_client import _parse_connection_str, _build_auth +from ._cosmos_client_connection_async import CosmosClientConnection +from .._base import build_options +from ._retry_utility import ConnectionRetryPolicy +# from .database import DatabaseProxy +from ..documents import ConnectionPolicy, DatabaseAccount +from ..exceptions import CosmosResourceNotFoundError + +__all__ = ("CosmosClient",) + + +def _build_connection_policy(kwargs): + # type: (Dict[str, Any]) -> ConnectionPolicy + # pylint: disable=protected-access + policy = kwargs.pop('connection_policy', None) or ConnectionPolicy() + + # Connection config + policy.RequestTimeout = kwargs.pop('request_timeout', None) or \ + kwargs.pop('connection_timeout', None) or \ + policy.RequestTimeout + policy.ConnectionMode = kwargs.pop('connection_mode', None) or policy.ConnectionMode + policy.ProxyConfiguration = kwargs.pop('proxy_config', None) or policy.ProxyConfiguration + policy.EnableEndpointDiscovery = kwargs.pop('enable_endpoint_discovery', None) or policy.EnableEndpointDiscovery + policy.PreferredLocations = kwargs.pop('preferred_locations', None) or policy.PreferredLocations + policy.UseMultipleWriteLocations = kwargs.pop('multiple_write_locations', None) or \ + policy.UseMultipleWriteLocations + + # SSL config + verify = kwargs.pop('connection_verify', None) + policy.DisableSSLVerification = not bool(verify if verify is not None else True) + ssl = kwargs.pop('ssl_config', None) or policy.SSLConfiguration + if ssl: + ssl.SSLCertFile = kwargs.pop('connection_cert', None) or ssl.SSLCertFile + ssl.SSLCaCerts = verify or ssl.SSLCaCerts + policy.SSLConfiguration = ssl + + # Retry config + retry = kwargs.pop('retry_options', None) or policy.RetryOptions + total_retries = kwargs.pop('retry_total', None) + retry._max_retry_attempt_count = total_retries or retry._max_retry_attempt_count + retry._fixed_retry_interval_in_milliseconds = kwargs.pop('retry_fixed_interval', None) or \ + retry._fixed_retry_interval_in_milliseconds + max_backoff = kwargs.pop('retry_backoff_max', None) + retry._max_wait_time_in_seconds = max_backoff or retry._max_wait_time_in_seconds + policy.RetryOptions = retry + connection_retry = kwargs.pop('connection_retry_policy', None) or policy.ConnectionRetryConfiguration + if not connection_retry: + connection_retry = ConnectionRetryPolicy( + retry_total=total_retries, + retry_connect=kwargs.pop('retry_connect', None), + retry_read=kwargs.pop('retry_read', None), + retry_status=kwargs.pop('retry_status', None), + retry_backoff_max=max_backoff, + retry_on_status_codes=kwargs.pop('retry_on_status_codes', []), + retry_backoff_factor=kwargs.pop('retry_backoff_factor', 0.8), + ) + policy.ConnectionRetryConfiguration = connection_retry + + return policy + + + +class CosmosClient(object): + """A client-side logical representation of an Azure Cosmos DB account. + + Use this client to configure and execute requests to the Azure Cosmos DB service. + + :param str url: The URL of the Cosmos DB account. + :param credential: Can be the account key, or a dictionary of resource tokens. + :type credential: str or dict[str, str] + :param str consistency_level: Consistency level to use for the session. The default value is "Session". + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/examples.py
+            :start-after: [START create_client]
+            :end-before: [END create_client]
+            :language: python
+            :dedent: 0
+            :caption: Create a new instance of the Cosmos DB client:
+            :name: create_client
+    """
+
+    def __init__(self, url, credential, **kwargs):
+        # type: (str, Any, Any) -> None
+        """Instantiate a new CosmosClient."""
+        auth = _build_auth(credential)
+        consistency_level = kwargs.get('consistency_level', 'Session')
+        connection_policy = _build_connection_policy(kwargs)
+        self.client_connection = CosmosClientConnection(
+            url,
+            auth=auth,
+            consistency_level=consistency_level,
+            connection_policy=connection_policy,
+            **kwargs
+        )
+
+    def __repr__(self):
+        # type: () -> str
+        return "<CosmosClient [{}]>".format(self.client_connection.url_connection)[:1024]
+
+    async def __aenter__(self):
+        await self.client_connection.pipeline_client.__aenter__()
+        await self.client_connection._setup()
+        return self
+
+    async def __aexit__(self, *args):
+        return await self.client_connection.pipeline_client.__aexit__(*args)
+
+    async def close(self):
+        await self.__aexit__()
+
+    @classmethod
+    def from_connection_string(cls, conn_str, credential=None, consistency_level="Session", **kwargs):
+        # type: (str, Optional[Any], str, Any) -> CosmosClient
+        """Create a CosmosClient instance from a connection string.
+
+        The connection string can be retrieved from the Azure portal. For the full list of optional
+        keyword arguments, see the CosmosClient constructor.
+
+        :param str conn_str: The connection string.
+        :param credential: Alternative credentials to use instead of the key
+            provided in the connection string.
+        :type credential: str or dict(str, str)
+        :param str consistency_level:
+            Consistency level to use for the session. The default value is "Session".
+        """
+        settings = _parse_connection_str(conn_str, credential)
+        return cls(
+            url=settings['AccountEndpoint'],
+            credential=credential or settings['AccountKey'],
+            consistency_level=consistency_level,
+            **kwargs
+        )
diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client_async.py
deleted file mode 100644
index 879cafcf7f9f..000000000000
--- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client_async.py
+++ /dev/null
@@ -1,456 +0,0 @@
-# The MIT License (MIT)
-# Copyright (c) 2014 Microsoft Corporation
-
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-
-# The above copyright notice and this permission notice shall be included in all
-# copies or substantial portions of the Software.
-
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-# SOFTWARE.
-
-"""Create, read, and delete databases in the Azure Cosmos DB SQL API service.
-""" - -from typing import Any, Dict, Optional, Union, cast, Iterable, List # pylint: disable=unused-import - -import six -from azure.core.tracing.decorator import distributed_trace # type: ignore - -from ._cosmos_client_connection_async import CosmosClientConnection -from .._base import build_options -from .._retry_utility import ConnectionRetryPolicy -from .database_async import DatabaseProxy -from ..documents import ConnectionPolicy, DatabaseAccount -from ..exceptions import CosmosResourceNotFoundError - -__all__ = ("CosmosClient",) - - -def _parse_connection_str(conn_str, credential): - # type: (str, Optional[Any]) -> Dict[str, str] - conn_str = conn_str.rstrip(";") - conn_settings = dict( # type: ignore # pylint: disable=consider-using-dict-comprehension - s.split("=", 1) for s in conn_str.split(";") - ) - if 'AccountEndpoint' not in conn_settings: - raise ValueError("Connection string missing setting 'AccountEndpoint'.") - if not credential and 'AccountKey' not in conn_settings: - raise ValueError("Connection string missing setting 'AccountKey'.") - return conn_settings - - -def _build_auth(credential): - # type: (Any) -> Dict[str, Any] - auth = {} - if isinstance(credential, six.string_types): - auth['masterKey'] = credential - elif isinstance(credential, dict): - if any(k for k in credential.keys() if k in ['masterKey', 'resourceTokens', 'permissionFeed']): - return credential # Backwards compatible - auth['resourceTokens'] = credential # type: ignore - elif hasattr(credential, '__iter__'): - auth['permissionFeed'] = credential - else: - raise TypeError( - "Unrecognized credential type. Please supply the master key as str, " - "or a dictionary or resource tokens, or a list of permissions.") - return auth - - -def _build_connection_policy(kwargs): - # type: (Dict[str, Any]) -> ConnectionPolicy - # pylint: disable=protected-access - policy = kwargs.pop('connection_policy', None) or ConnectionPolicy() - - # Connection config - policy.RequestTimeout = kwargs.pop('request_timeout', None) or \ - kwargs.pop('connection_timeout', None) or \ - policy.RequestTimeout - policy.ConnectionMode = kwargs.pop('connection_mode', None) or policy.ConnectionMode - policy.ProxyConfiguration = kwargs.pop('proxy_config', None) or policy.ProxyConfiguration - policy.EnableEndpointDiscovery = kwargs.pop('enable_endpoint_discovery', None) or policy.EnableEndpointDiscovery - policy.PreferredLocations = kwargs.pop('preferred_locations', None) or policy.PreferredLocations - policy.UseMultipleWriteLocations = kwargs.pop('multiple_write_locations', None) or \ - policy.UseMultipleWriteLocations - - # SSL config - verify = kwargs.pop('connection_verify', None) - policy.DisableSSLVerification = not bool(verify if verify is not None else True) - ssl = kwargs.pop('ssl_config', None) or policy.SSLConfiguration - if ssl: - ssl.SSLCertFile = kwargs.pop('connection_cert', None) or ssl.SSLCertFile - ssl.SSLCaCerts = verify or ssl.SSLCaCerts - policy.SSLConfiguration = ssl - - # Retry config - retry = kwargs.pop('retry_options', None) or policy.RetryOptions - total_retries = kwargs.pop('retry_total', None) - retry._max_retry_attempt_count = total_retries or retry._max_retry_attempt_count - retry._fixed_retry_interval_in_milliseconds = kwargs.pop('retry_fixed_interval', None) or \ - retry._fixed_retry_interval_in_milliseconds - max_backoff = kwargs.pop('retry_backoff_max', None) - retry._max_wait_time_in_seconds = max_backoff or retry._max_wait_time_in_seconds - policy.RetryOptions = retry - connection_retry = 
kwargs.pop('connection_retry_policy', None) or policy.ConnectionRetryConfiguration - if not connection_retry: - connection_retry = ConnectionRetryPolicy( - retry_total=total_retries, - retry_connect=kwargs.pop('retry_connect', None), - retry_read=kwargs.pop('retry_read', None), - retry_status=kwargs.pop('retry_status', None), - retry_backoff_max=max_backoff, - retry_on_status_codes=kwargs.pop('retry_on_status_codes', []), - retry_backoff_factor=kwargs.pop('retry_backoff_factor', 0.8), - ) - policy.ConnectionRetryConfiguration = connection_retry - - return policy - - - -class AsyncCosmosClient(object): - """A client-side logical representation of an Azure Cosmos DB account. - - Use this client to configure and execute requests to the Azure Cosmos DB service. - - :param str url: The URL of the Cosmos DB account. - :param credential: Can be the account key, or a dictionary of resource tokens. - :type credential: str or dict[str, str] - :param str consistency_level: Consistency level to use for the session. The default value is "Session". - :keyword int timeout: An absolute timeout in seconds, for the combined HTTP request and response processing. - :keyword int request_timeout: The HTTP request timeout in milliseconds. - :keyword str connection_mode: The connection mode for the client - currently only supports 'Gateway'. - :keyword proxy_config: Connection proxy configuration. - :paramtype proxy_config: ~azure.cosmos.ProxyConfiguration - :keyword ssl_config: Connection SSL configuration. - :paramtype ssl_config: ~azure.cosmos.SSLConfiguration - :keyword bool connection_verify: Whether to verify the connection, default value is True. - :keyword str connection_cert: An alternative certificate to verify the connection. - :keyword int retry_total: Maximum retry attempts. - :keyword int retry_backoff_max: Maximum retry wait time in seconds. - :keyword int retry_fixed_interval: Fixed retry interval in milliseconds. - :keyword int retry_read: Maximum number of socket read retry attempts. - :keyword int retry_connect: Maximum number of connection error retry attempts. - :keyword int retry_status: Maximum number of retry attempts on error status codes. - :keyword list[int] retry_on_status_codes: A list of specific status codes to retry on. - :keyword float retry_backoff_factor: Factor to calculate wait time between retry attempts. - :keyword bool enable_endpoint_discovery: Enable endpoint discovery for - geo-replicated database accounts. (Default: True) - :keyword list[str] preferred_locations: The preferred locations for geo-replicated database accounts. - - .. admonition:: Example: - - .. 
literalinclude:: ../samples/examples.py
-            :start-after: [START create_client]
-            :end-before: [END create_client]
-            :language: python
-            :dedent: 0
-            :caption: Create a new instance of the Cosmos DB client:
-            :name: create_client
-    """
-
-    def __init__(self, url, credential, consistency_level="Session", **kwargs):
-        # type: (str, Any, str, Any) -> None
-        """Instantiate a new CosmosClient."""
-        auth = _build_auth(credential)
-        connection_policy = _build_connection_policy(kwargs)
-        self.client_connection = CosmosClientConnection(
-            url, auth=auth, consistency_level=consistency_level, connection_policy=connection_policy, **kwargs
-        )
-
-    def __repr__(self):  # pylint:disable=client-method-name-no-double-underscore
-        # type () -> str
-        return "<CosmosClient [{}]>".format(self.client_connection.url_connection)[:1024]
-
-    def __enter__(self):
-        self.client_connection.pipeline_client.__enter__()
-        return self
-
-    def __exit__(self, *args):
-        return self.client_connection.pipeline_client.__exit__(*args)
-
-    @classmethod
-    def from_connection_string(cls, conn_str, credential=None, consistency_level="Session", **kwargs):
-        # type: (str, Optional[Any], str, Any) -> CosmosClient
-        """Create a CosmosClient instance from a connection string.
-
-        This can be retrieved from the Azure portal.For full list of optional
-        keyword arguments, see the CosmosClient constructor.
-
-        :param str conn_str: The connection string.
-        :param credential: Alternative credentials to use instead of the key
-            provided in the connection string.
-        :type credential: str or dict(str, str)
-        :param str consistency_level:
-            Consistency level to use for the session. The default value is "Session".
-        """
-        settings = _parse_connection_str(conn_str, credential)
-        return cls(
-            url=settings['AccountEndpoint'],
-            credential=credential or settings['AccountKey'],
-            consistency_level=consistency_level,
-            **kwargs
-        )
-
-    @staticmethod
-    def _get_database_link(database_or_id):
-        # type: (Union[DatabaseProxy, str, Dict[str, str]]) -> str
-        if isinstance(database_or_id, six.string_types):
-            return "dbs/{}".format(database_or_id)
-        try:
-            return cast("DatabaseProxy", database_or_id).database_link
-        except AttributeError:
-            pass
-        database_id = cast("Dict[str, str]", database_or_id)["id"]
-        return "dbs/{}".format(database_id)
-
-    @distributed_trace
-    def create_database(  # pylint: disable=redefined-builtin
-        self,
-        id,  # type: str
-        populate_query_metrics=None,  # type: Optional[bool]
-        offer_throughput=None,  # type: Optional[int]
-        **kwargs  # type: Any
-    ):
-        # type: (...) -> DatabaseProxy
-        """
-        Create a new database with the given ID (name).
-
-        :param id: ID (name) of the database to create.
-        :param bool populate_query_metrics: Enable returning query metrics in response headers.
-        :param int offer_throughput: The provisioned throughput for this offer.
-        :keyword str session_token: Token for use with Session consistency.
-        :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request.
-        :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource
-            has changed, and act according to the condition specified by the `match_condition` parameter.
-        :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag.
-        :keyword Callable response_hook: A callable invoked with the response metadata.
-        :returns: A DatabaseProxy instance representing the new database.
- :rtype: ~azure.cosmos.DatabaseProxy - :raises ~azure.cosmos.exceptions.CosmosResourceExistsError: Database with the given ID already exists. - - .. admonition:: Example: - - .. literalinclude:: ../samples/examples.py - :start-after: [START create_database] - :end-before: [END create_database] - :language: python - :dedent: 0 - :caption: Create a database in the Cosmos DB account: - :name: create_database - """ - - request_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - if populate_query_metrics is not None: - request_options["populateQueryMetrics"] = populate_query_metrics - if offer_throughput is not None: - request_options["offerThroughput"] = offer_throughput - - result = self.client_connection.CreateDatabase(database=dict(id=id), options=request_options, **kwargs) - if response_hook: - response_hook(self.client_connection.last_response_headers) - return DatabaseProxy(self.client_connection, id=result["id"], properties=result) - - @distributed_trace - def create_database_if_not_exists( # pylint: disable=redefined-builtin - self, - id, # type: str - populate_query_metrics=None, # type: Optional[bool] - offer_throughput=None, # type: Optional[int] - **kwargs # type: Any - ): - # type: (...) -> DatabaseProxy - """ - Create the database if it does not exist already. - - If the database already exists, the existing settings are returned. - - ..note:: - This function does not check or update existing database settings or - offer throughput if they differ from what is passed in. - - :param id: ID (name) of the database to read or create. - :param bool populate_query_metrics: Enable returning query metrics in response headers. - :param int offer_throughput: The provisioned throughput for this offer. - :keyword str session_token: Token for use with Session consistency. - :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. - :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource - has changed, and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. - :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: A DatabaseProxy instance representing the database. - :rtype: ~azure.cosmos.DatabaseProxy - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The database read or creation failed. - """ - try: - database_proxy = self.get_database_client(id) - database_proxy.read( - populate_query_metrics=populate_query_metrics, - **kwargs - ) - return database_proxy - except CosmosResourceNotFoundError: - return self.create_database( - id, - populate_query_metrics=populate_query_metrics, - offer_throughput=offer_throughput, - **kwargs - ) - - def get_database_client(self, database): - # type: (Union[str, DatabaseProxy, Dict[str, Any]]) -> DatabaseProxy - """Retrieve an existing database with the ID (name) `id`. - - :param database: The ID (name), dict representing the properties or - `DatabaseProxy` instance of the database to read. - :type database: str or dict(str, str) or ~azure.cosmos.DatabaseProxy - :returns: A `DatabaseProxy` instance representing the retrieved database. 
- :rtype: ~azure.cosmos.DatabaseProxy - """ - if isinstance(database, DatabaseProxy): - id_value = database.id - else: - try: - id_value = database["id"] - except TypeError: - id_value = database - - return DatabaseProxy(self.client_connection, id_value) - - @distributed_trace - def list_databases( - self, - max_item_count=None, # type: Optional[int] - populate_query_metrics=None, # type: Optional[bool] - **kwargs # type: Any - ): - # type: (...) -> Iterable[Dict[str, Any]] - """List the databases in a Cosmos DB SQL database account. - - :param int max_item_count: Max number of items to be returned in the enumeration operation. - :param bool populate_query_metrics: Enable returning query metrics in response headers. - :keyword str session_token: Token for use with Session consistency. - :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. - :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: An Iterable of database properties (dicts). - :rtype: Iterable[dict[str, str]] - """ - feed_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - if max_item_count is not None: - feed_options["maxItemCount"] = max_item_count - if populate_query_metrics is not None: - feed_options["populateQueryMetrics"] = populate_query_metrics - - result = self.client_connection.ReadDatabases(options=feed_options, **kwargs) - if response_hook: - response_hook(self.client_connection.last_response_headers) - return result - - @distributed_trace - def query_databases( - self, - query=None, # type: Optional[str] - parameters=None, # type: Optional[List[str]] - enable_cross_partition_query=None, # type: Optional[bool] - max_item_count=None, # type: Optional[int] - populate_query_metrics=None, # type: Optional[bool] - **kwargs # type: Any - ): - # type: (...) -> Iterable[Dict[str, Any]] - """Query the databases in a Cosmos DB SQL database account. - - :param str query: The Azure Cosmos DB SQL query to execute. - :param list[str] parameters: Optional array of parameters to the query. Ignored if no query is provided. - :param bool enable_cross_partition_query: Allow scan on the queries which couldn't be - served as indexing was opted out on the requested paths. - :param int max_item_count: Max number of items to be returned in the enumeration operation. - :param bool populate_query_metrics: Enable returning query metrics in response headers. - :keyword str session_token: Token for use with Session consistency. - :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. - :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: An Iterable of database properties (dicts). - :rtype: Iterable[dict[str, str]] - """ - feed_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - if enable_cross_partition_query is not None: - feed_options["enableCrossPartitionQuery"] = enable_cross_partition_query - if max_item_count is not None: - feed_options["maxItemCount"] = max_item_count - if populate_query_metrics is not None: - feed_options["populateQueryMetrics"] = populate_query_metrics - - if query: - # This is currently eagerly evaluated in order to capture the headers - # from the call. 
- # (just returning a generator did not initiate the first network call, so - # the headers were misleading) - # This needs to change for "real" implementation - query = query if parameters is None else dict(query=query, parameters=parameters) # type: ignore - result = self.client_connection.QueryDatabases(query=query, options=feed_options, **kwargs) - else: - result = self.client_connection.ReadDatabases(options=feed_options, **kwargs) - if response_hook: - response_hook(self.client_connection.last_response_headers) - return result - - @distributed_trace - def delete_database( - self, - database, # type: Union[str, DatabaseProxy, Dict[str, Any]] - populate_query_metrics=None, # type: Optional[bool] - **kwargs # type: Any - ): - # type: (...) -> None - """Delete the database with the given ID (name). - - :param database: The ID (name), dict representing the properties or :class:`DatabaseProxy` - instance of the database to delete. - :type database: str or dict(str, str) or ~azure.cosmos.DatabaseProxy - :param bool populate_query_metrics: Enable returning query metrics in response headers. - :keyword str session_token: Token for use with Session consistency. - :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. - :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource - has changed, and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. - :keyword Callable response_hook: A callable invoked with the response metadata. - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the database couldn't be deleted. - :rtype: None - """ - request_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - if populate_query_metrics is not None: - request_options["populateQueryMetrics"] = populate_query_metrics - - database_link = self._get_database_link(database) - self.client_connection.DeleteDatabase(database_link, options=request_options, **kwargs) - if response_hook: - response_hook(self.client_connection.last_response_headers) - - @distributed_trace - def get_database_account(self, **kwargs): - # type: (Any) -> DatabaseAccount - """Retrieve the database account information. - - :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: A `DatabaseAccount` instance representing the Cosmos DB Database Account. 
- :rtype: ~azure.cosmos.DatabaseAccount - """ - response_hook = kwargs.pop('response_hook', None) - result = self.client_connection.GetDatabaseAccount(**kwargs) - if response_hook: - response_hook(self.client_connection.last_response_headers) - return result diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database_async.py deleted file mode 100644 index cbb1e0ab6902..000000000000 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database_async.py +++ /dev/null @@ -1,768 +0,0 @@ -# The MIT License (MIT) -# Copyright (c) 2014 Microsoft Corporation - -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: - -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. - -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -"""Interact with databases in the Azure Cosmos DB SQL API service. -""" - -from typing import Any, List, Dict, Union, cast, Iterable, Optional - -import warnings -import six -from azure.core.tracing.decorator import distributed_trace # type: ignore - -from ._cosmos_client_connection_async import CosmosClientConnection -from .._base import build_options -from .container_async import ContainerProxy -from ..offer import Offer -from ..http_constants import StatusCodes -from ..exceptions import CosmosResourceNotFoundError -from ..user import UserProxy -from ..documents import IndexingMode - -__all__ = ("DatabaseProxy",) - -# pylint: disable=protected-access -# pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs - - -class DatabaseProxy(object): - """An interface to interact with a specific database. - - This class should not be instantiated directly. Instead use the - :func:`CosmosClient.get_database_client` method. - - A database contains one or more containers, each of which can contain items, - stored procedures, triggers, and user-defined functions. - - A database can also have associated users, each of which is configured with - a set of permissions for accessing certain containers, stored procedures, - triggers, user-defined functions, or items. - - :ivar id: The ID (name) of the database. - - An Azure Cosmos DB SQL API database has the following system-generated - properties. These properties are read-only: - - * `_rid`: The resource ID. - * `_ts`: When the resource was last updated. The value is a timestamp. - * `_self`: The unique addressable URI for the resource. - * `_etag`: The resource etag required for optimistic concurrency control. - * `_colls`: The addressable path of the collections resource. - * `_users`: The addressable path of the users resource. 
- """ - - def __init__(self, client_connection, id, properties=None): # pylint: disable=redefined-builtin - # type: (CosmosClientConnection, str, Dict[str, Any]) -> None - """ - :param ClientSession client_connection: Client from which this database was retrieved. - :param str id: ID (name) of the database. - """ - self.client_connection = client_connection - self.id = id - self.database_link = u"dbs/{}".format(self.id) - self._properties = properties - - def __repr__(self): - # type () -> str - return "".format(self.database_link)[:1024] - - @staticmethod - def _get_container_id(container_or_id): - # type: (Union[str, ContainerProxy, Dict[str, Any]]) -> str - if isinstance(container_or_id, six.string_types): - return container_or_id - try: - return cast("ContainerProxy", container_or_id).id - except AttributeError: - pass - return cast("Dict[str, str]", container_or_id)["id"] - - def _get_container_link(self, container_or_id): - # type: (Union[str, ContainerProxy, Dict[str, Any]]) -> str - return u"{}/colls/{}".format(self.database_link, self._get_container_id(container_or_id)) - - def _get_user_link(self, user_or_id): - # type: (Union[UserProxy, str, Dict[str, Any]]) -> str - if isinstance(user_or_id, six.string_types): - return u"{}/users/{}".format(self.database_link, user_or_id) - try: - return cast("UserProxy", user_or_id).user_link - except AttributeError: - pass - return u"{}/users/{}".format(self.database_link, cast("Dict[str, str]", user_or_id)["id"]) - - def _get_properties(self): - # type: () -> Dict[str, Any] - if self._properties is None: - self._properties = self.read() - return self._properties - - @distributed_trace - def read(self, populate_query_metrics=None, **kwargs): - # type: (Optional[bool], Any) -> Dict[str, Any] - """Read the database properties. - - :param bool populate_query_metrics: Enable returning query metrics in response headers. - :keyword str session_token: Token for use with Session consistency. - :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. - :keyword Callable response_hook: A callable invoked with the response metadata. - :rtype: Dict[Str, Any] - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given database couldn't be retrieved. - """ - # TODO this helper function should be extracted from CosmosClient - from .cosmos_client_async import CosmosClient - - database_link = CosmosClient._get_database_link(self) - request_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - if populate_query_metrics is not None: - request_options["populateQueryMetrics"] = populate_query_metrics - - self._properties = self.client_connection.ReadDatabase( - database_link, options=request_options, **kwargs - ) - - if response_hook: - response_hook(self.client_connection.last_response_headers, self._properties) - - return cast('Dict[str, Any]', self._properties) - - @distributed_trace - async def create_container( - self, - id, # type: str # pylint: disable=redefined-builtin - partition_key, # type: Any - indexing_policy=None, # type: Optional[Dict[str, Any]] - default_ttl=None, # type: Optional[int] - populate_query_metrics=None, # type: Optional[bool] - offer_throughput=None, # type: Optional[int] - unique_key_policy=None, # type: Optional[Dict[str, Any]] - conflict_resolution_policy=None, # type: Optional[Dict[str, Any]] - **kwargs # type: Any - ): - # type: (...) -> ContainerProxy - """Create a new container with the given ID (name). 
- - If a container with the given ID already exists, a CosmosResourceExistsError is raised. - - :param id: ID (name) of container to create. - :param partition_key: The partition key to use for the container. - :param indexing_policy: The indexing policy to apply to the container. - :param default_ttl: Default time to live (TTL) for items in the container. If unspecified, items do not expire. - :param populate_query_metrics: Enable returning query metrics in response headers. - :param offer_throughput: The provisioned throughput for this offer. - :param unique_key_policy: The unique key policy to apply to the container. - :param conflict_resolution_policy: The conflict resolution policy to apply to the container. - :keyword str session_token: Token for use with Session consistency. - :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. - :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource - has changed, and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. - :keyword Callable response_hook: A callable invoked with the response metadata. - :keyword analytical_storage_ttl: Analytical store time to live (TTL) for items in the container. A value of - None leaves analytical storage off and a value of -1 turns analytical storage on with no TTL. Please - note that analytical storage can only be enabled on Synapse Link enabled accounts. - :returns: A `ContainerProxy` instance representing the new container. - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The container creation failed. - :rtype: ~azure.cosmos.ContainerProxy - - .. admonition:: Example: - - .. literalinclude:: ../samples/examples.py - :start-after: [START create_container] - :end-before: [END create_container] - :language: python - :dedent: 0 - :caption: Create a container with default settings: - :name: create_container - - .. literalinclude:: ../samples/examples.py - :start-after: [START create_container_with_settings] - :end-before: [END create_container_with_settings] - :language: python - :dedent: 0 - :caption: Create a container with specific settings; in this case, a custom partition key: - :name: create_container_with_settings - """ - definition = dict(id=id) # type: Dict[str, Any] - if partition_key is not None: - definition["partitionKey"] = partition_key - if indexing_policy is not None: - if indexing_policy.get("indexingMode") is IndexingMode.Lazy: - warnings.warn( - "Lazy indexing mode has been deprecated. 
Mode will be set to consistent indexing by the backend.", - DeprecationWarning - ) - definition["indexingPolicy"] = indexing_policy - if default_ttl is not None: - definition["defaultTtl"] = default_ttl - if unique_key_policy is not None: - definition["uniqueKeyPolicy"] = unique_key_policy - if conflict_resolution_policy is not None: - definition["conflictResolutionPolicy"] = conflict_resolution_policy - - analytical_storage_ttl = kwargs.pop("analytical_storage_ttl", None) - if analytical_storage_ttl is not None: - definition["analyticalStorageTtl"] = analytical_storage_ttl - - request_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - if populate_query_metrics is not None: - request_options["populateQueryMetrics"] = populate_query_metrics - if offer_throughput is not None: - request_options["offerThroughput"] = offer_throughput - - data = self.client_connection.CreateContainer( - database_link=self.database_link, collection=definition, options=request_options, **kwargs - ) - - if response_hook: - response_hook(self.client_connection.last_response_headers, data) - - return ContainerProxy(self.client_connection, self.database_link, data["id"], properties=data) - - @distributed_trace - def create_container_if_not_exists( - self, - id, # type: str # pylint: disable=redefined-builtin - partition_key, # type: Any - indexing_policy=None, # type: Optional[Dict[str, Any]] - default_ttl=None, # type: Optional[int] - populate_query_metrics=None, # type: Optional[bool] - offer_throughput=None, # type: Optional[int] - unique_key_policy=None, # type: Optional[Dict[str, Any]] - conflict_resolution_policy=None, # type: Optional[Dict[str, Any]] - **kwargs # type: Any - ): - # type: (...) -> ContainerProxy - """Create a container if it does not exist already. - - If the container already exists, the existing settings are returned. - Note: it does not check or update the existing container settings or offer throughput - if they differ from what was passed into the method. - - :param id: ID (name) of container to read or create. - :param partition_key: The partition key to use for the container. - :param indexing_policy: The indexing policy to apply to the container. - :param default_ttl: Default time to live (TTL) for items in the container. If unspecified, items do not expire. - :param populate_query_metrics: Enable returning query metrics in response headers. - :param offer_throughput: The provisioned throughput for this offer. - :param unique_key_policy: The unique key policy to apply to the container. - :param conflict_resolution_policy: The conflict resolution policy to apply to the container. - :keyword str session_token: Token for use with Session consistency. - :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. - :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource - has changed, and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. - :keyword Callable response_hook: A callable invoked with the response metadata. - :keyword analytical_storage_ttl: Analytical store time to live (TTL) for items in the container. A value of - None leaves analytical storage off and a value of -1 turns analytical storage on with no TTL. Please - note that analytical storage can only be enabled on Synapse Link enabled accounts. 
- :returns: A `ContainerProxy` instance representing the container. - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The container read or creation failed. - :rtype: ~azure.cosmos.ContainerProxy - """ - - analytical_storage_ttl = kwargs.pop("analytical_storage_ttl", None) - try: - container_proxy = self.get_container_client(id) - container_proxy.read( - populate_query_metrics=populate_query_metrics, - **kwargs - ) - return container_proxy - except CosmosResourceNotFoundError: - return self.create_container( - id=id, - partition_key=partition_key, - indexing_policy=indexing_policy, - default_ttl=default_ttl, - populate_query_metrics=populate_query_metrics, - offer_throughput=offer_throughput, - unique_key_policy=unique_key_policy, - conflict_resolution_policy=conflict_resolution_policy, - analytical_storage_ttl=analytical_storage_ttl - ) - - @distributed_trace - def delete_container( - self, - container, # type: Union[str, ContainerProxy, Dict[str, Any]] - populate_query_metrics=None, # type: Optional[bool] - **kwargs # type: Any - ): - # type: (...) -> None - """Delete a container. - - :param container: The ID (name) of the container to delete. You can either - pass in the ID of the container to delete, a :class:`ContainerProxy` instance or - a dict representing the properties of the container. - :param populate_query_metrics: Enable returning query metrics in response headers. - :keyword str session_token: Token for use with Session consistency. - :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. - :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource - has changed, and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. - :keyword Callable response_hook: A callable invoked with the response metadata. - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the container couldn't be deleted. - :rtype: None - """ - request_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - if populate_query_metrics is not None: - request_options["populateQueryMetrics"] = populate_query_metrics - - collection_link = self._get_container_link(container) - result = self.client_connection.DeleteContainer(collection_link, options=request_options, **kwargs) - if response_hook: - response_hook(self.client_connection.last_response_headers, result) - - def get_container_client(self, container): - # type: (Union[str, ContainerProxy, Dict[str, Any]]) -> ContainerProxy - """Get a `ContainerProxy` for a container with specified ID (name). - - :param container: The ID (name) of the container, a :class:`ContainerProxy` instance, - or a dict representing the properties of the container to be retrieved. - :rtype: ~azure.cosmos.ContainerProxy - - .. admonition:: Example: - - .. 
literalinclude:: ../samples/examples.py - :start-after: [START get_container] - :end-before: [END get_container] - :language: python - :dedent: 0 - :caption: Get an existing container, handling a failure if encountered: - :name: get_container - """ - if isinstance(container, ContainerProxy): - id_value = container.id - else: - try: - id_value = container["id"] - except TypeError: - id_value = container - - return ContainerProxy(self.client_connection, self.database_link, id_value) - - @distributed_trace - def list_containers(self, max_item_count=None, populate_query_metrics=None, **kwargs): - # type: (Optional[int], Optional[bool], Any) -> Iterable[Dict[str, Any]] - """List the containers in the database. - - :param max_item_count: Max number of items to be returned in the enumeration operation. - :param populate_query_metrics: Enable returning query metrics in response headers. - :keyword str session_token: Token for use with Session consistency. - :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. - :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: An Iterable of container properties (dicts). - :rtype: Iterable[dict[str, Any]] - - .. admonition:: Example: - - .. literalinclude:: ../samples/examples.py - :start-after: [START list_containers] - :end-before: [END list_containers] - :language: python - :dedent: 0 - :caption: List all containers in the database: - :name: list_containers - """ - feed_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - if max_item_count is not None: - feed_options["maxItemCount"] = max_item_count - if populate_query_metrics is not None: - feed_options["populateQueryMetrics"] = populate_query_metrics - - result = self.client_connection.ReadContainers( - database_link=self.database_link, options=feed_options, **kwargs - ) - if response_hook: - response_hook(self.client_connection.last_response_headers, result) - return response_hook - - @distributed_trace - def query_containers( - self, - query=None, # type: Optional[str] - parameters=None, # type: Optional[List[str]] - max_item_count=None, # type: Optional[int] - populate_query_metrics=None, # type: Optional[bool] - **kwargs # type: Any - ): - # type: (...) -> Iterable[Dict[str, Any]] - """List the properties for containers in the current database. - - :param query: The Azure Cosmos DB SQL query to execute. - :param parameters: Optional array of parameters to the query. Ignored if no query is provided. - :param max_item_count: Max number of items to be returned in the enumeration operation. - :param populate_query_metrics: Enable returning query metrics in response headers. - :keyword str session_token: Token for use with Session consistency. - :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. - :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: An Iterable of container properties (dicts). 
- :rtype: Iterable[dict[str, Any]] - """ - feed_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - if max_item_count is not None: - feed_options["maxItemCount"] = max_item_count - if populate_query_metrics is not None: - feed_options["populateQueryMetrics"] = populate_query_metrics - - result = self.client_connection.QueryContainers( - database_link=self.database_link, - query=query if parameters is None else dict(query=query, parameters=parameters), - options=feed_options, - **kwargs - ) - if response_hook: - response_hook(self.client_connection.last_response_headers, result) - return result - - @distributed_trace - def replace_container( - self, - container, # type: Union[str, ContainerProxy, Dict[str, Any]] - partition_key, # type: Any - indexing_policy=None, # type: Optional[Dict[str, Any]] - default_ttl=None, # type: Optional[int] - conflict_resolution_policy=None, # type: Optional[Dict[str, Any]] - populate_query_metrics=None, # type: Optional[bool] - **kwargs # type: Any - ): - # type: (...) -> ContainerProxy - """Reset the properties of the container. - - Property changes are persisted immediately. Any properties not specified - will be reset to their default values. - - :param container: The ID (name), dict representing the properties or - :class:`ContainerProxy` instance of the container to be replaced. - :param partition_key: The partition key to use for the container. - :param indexing_policy: The indexing policy to apply to the container. - :param default_ttl: Default time to live (TTL) for items in the container. - If unspecified, items do not expire. - :param conflict_resolution_policy: The conflict resolution policy to apply to the container. - :param populate_query_metrics: Enable returning query metrics in response headers. - :keyword str session_token: Token for use with Session consistency. - :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource - has changed, and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. - :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. - :keyword Callable response_hook: A callable invoked with the response metadata. - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: Raised if the container couldn't be replaced. - This includes if the container with given id does not exist. - :returns: A `ContainerProxy` instance representing the container after replace completed. - :rtype: ~azure.cosmos.ContainerProxy - - .. admonition:: Example: - - .. 
literalinclude:: ../samples/examples.py - :start-after: [START reset_container_properties] - :end-before: [END reset_container_properties] - :language: python - :dedent: 0 - :caption: Reset the TTL property on a container, and display the updated properties: - :name: reset_container_properties - """ - request_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - if populate_query_metrics is not None: - request_options["populateQueryMetrics"] = populate_query_metrics - - container_id = self._get_container_id(container) - container_link = self._get_container_link(container_id) - parameters = { - key: value - for key, value in { - "id": container_id, - "partitionKey": partition_key, - "indexingPolicy": indexing_policy, - "defaultTtl": default_ttl, - "conflictResolutionPolicy": conflict_resolution_policy, - }.items() - if value is not None - } - - container_properties = self.client_connection.ReplaceContainer( - container_link, collection=parameters, options=request_options, **kwargs - ) - - if response_hook: - response_hook(self.client_connection.last_response_headers, container_properties) - - return ContainerProxy( - self.client_connection, self.database_link, container_properties["id"], properties=container_properties - ) - - @distributed_trace - def list_users(self, max_item_count=None, **kwargs): - # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] - """List all the users in the container. - - :param max_item_count: Max number of users to be returned in the enumeration operation. - :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: An Iterable of user properties (dicts). - :rtype: Iterable[dict[str, Any]] - """ - feed_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - if max_item_count is not None: - feed_options["maxItemCount"] = max_item_count - - result = self.client_connection.ReadUsers( - database_link=self.database_link, options=feed_options, **kwargs - ) - if response_hook: - response_hook(self.client_connection.last_response_headers, result) - return result - - @distributed_trace - def query_users(self, query, parameters=None, max_item_count=None, **kwargs): - # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] - """Return all users matching the given `query`. - - :param query: The Azure Cosmos DB SQL query to execute. - :param parameters: Optional array of parameters to the query. Ignored if no query is provided. - :param max_item_count: Max number of users to be returned in the enumeration operation. - :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: An Iterable of user properties (dicts). - :rtype: Iterable[str, Any] - """ - feed_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - if max_item_count is not None: - feed_options["maxItemCount"] = max_item_count - - result = self.client_connection.QueryUsers( - database_link=self.database_link, - query=query if parameters is None else dict(query=query, parameters=parameters), - options=feed_options, - **kwargs - ) - if response_hook: - response_hook(self.client_connection.last_response_headers, result) - return result - - def get_user_client(self, user): - # type: (Union[str, UserProxy, Dict[str, Any]]) -> UserProxy - """Get a `UserProxy` for a user with specified ID. - - :param user: The ID (name), dict representing the properties or :class:`UserProxy` - instance of the user to be retrieved. 
- :returns: A `UserProxy` instance representing the retrieved user. - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given user couldn't be retrieved. - :rtype: ~azure.cosmos.UserProxy - """ - if isinstance(user, UserProxy): - id_value = user.id - else: - try: - id_value = user["id"] - except TypeError: - id_value = user - - return UserProxy(client_connection=self.client_connection, id=id_value, database_link=self.database_link) - - @distributed_trace - def create_user(self, body, **kwargs): - # type: (Dict[str, Any], Any) -> UserProxy - """Create a new user in the container. - - To update or replace an existing user, use the - :func:`ContainerProxy.upsert_user` method. - - :param body: A dict-like object with an `id` key and value representing the user to be created. - The user ID must be unique within the database, and consist of no more than 255 characters. - :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: A `UserProxy` instance representing the new user. - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given user couldn't be created. - :rtype: ~azure.cosmos.UserProxy - - .. admonition:: Example: - - .. literalinclude:: ../samples/examples.py - :start-after: [START create_user] - :end-before: [END create_user] - :language: python - :dedent: 0 - :caption: Create a database user: - :name: create_user - """ - request_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - - user = self.client_connection.CreateUser( - database_link=self.database_link, user=body, options=request_options, **kwargs) - - if response_hook: - response_hook(self.client_connection.last_response_headers, user) - - return UserProxy( - client_connection=self.client_connection, id=user["id"], database_link=self.database_link, properties=user - ) - - @distributed_trace - def upsert_user(self, body, **kwargs): - # type: (Dict[str, Any], Any) -> UserProxy - """Insert or update the specified user. - - If the user already exists in the container, it is replaced. If the user - does not already exist, it is inserted. - - :param body: A dict-like object representing the user to update or insert. - :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: A `UserProxy` instance representing the upserted user. - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given user could not be upserted. - :rtype: ~azure.cosmos.UserProxy - """ - request_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - - user = self.client_connection.UpsertUser( - database_link=self.database_link, user=body, options=request_options, **kwargs - ) - - if response_hook: - response_hook(self.client_connection.last_response_headers, user) - - return UserProxy( - client_connection=self.client_connection, id=user["id"], database_link=self.database_link, properties=user - ) - - @distributed_trace - def replace_user( - self, - user, # type: Union[str, UserProxy, Dict[str, Any]] - body, # type: Dict[str, Any] - **kwargs # type: Any - ): - # type: (...) -> UserProxy - """Replaces the specified user if it exists in the container. - - :param user: The ID (name), dict representing the properties or :class:`UserProxy` - instance of the user to be replaced. - :param body: A dict-like object representing the user to replace. - :keyword Callable response_hook: A callable invoked with the response metadata. 
- :returns: A `UserProxy` instance representing the user after replace went through. - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: - If the replace failed or the user with given ID does not exist. - :rtype: ~azure.cosmos.UserProxy - """ - request_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - - replaced_user = self.client_connection.ReplaceUser( - user_link=self._get_user_link(user), user=body, options=request_options, **kwargs - ) # type: Dict[str, str] - - if response_hook: - response_hook(self.client_connection.last_response_headers, replaced_user) - - return UserProxy( - client_connection=self.client_connection, - id=replaced_user["id"], - database_link=self.database_link, - properties=replaced_user - ) - - @distributed_trace - def delete_user(self, user, **kwargs): - # type: (Union[str, UserProxy, Dict[str, Any]], Any) -> None - """Delete the specified user from the container. - - :param user: The ID (name), dict representing the properties or :class:`UserProxy` - instance of the user to be deleted. - :keyword Callable response_hook: A callable invoked with the response metadata. - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The user wasn't deleted successfully. - :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The user does not exist in the container. - :rtype: None - """ - request_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - - result = self.client_connection.DeleteUser( - user_link=self._get_user_link(user), options=request_options, **kwargs - ) - if response_hook: - response_hook(self.client_connection.last_response_headers, result) - - @distributed_trace - def read_offer(self, **kwargs): - # type: (Any) -> Offer - """Read the Offer object for this database. - - :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: Offer for the database. - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: - If no offer exists for the database or if the offer could not be retrieved. - :rtype: ~azure.cosmos.Offer - """ - response_hook = kwargs.pop('response_hook', None) - properties = self._get_properties() - link = properties["_self"] - query_spec = { - "query": "SELECT * FROM root r WHERE r.resource=@link", - "parameters": [{"name": "@link", "value": link}], - } - offers = list(self.client_connection.QueryOffers(query_spec, **kwargs)) - if not offers: - raise CosmosResourceNotFoundError( - status_code=StatusCodes.NOT_FOUND, - message="Could not find Offer for database " + self.database_link) - - if response_hook: - response_hook(self.client_connection.last_response_headers, offers) - - return Offer(offer_throughput=offers[0]["content"]["offerThroughput"], properties=offers[0]) - - @distributed_trace - def replace_throughput(self, throughput, **kwargs): - # type: (Optional[int], Any) -> Offer - """Replace the database-level throughput. - - :param throughput: The throughput to be set (an integer). - :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: Offer for the database, updated with new throughput. - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: - If no offer exists for the database or if the offer could not be updated. 
- :rtype: ~azure.cosmos.Offer - """ - response_hook = kwargs.pop('response_hook', None) - properties = self._get_properties() - link = properties["_self"] - query_spec = { - "query": "SELECT * FROM root r WHERE r.resource=@link", - "parameters": [{"name": "@link", "value": link}], - } - offers = list(self.client_connection.QueryOffers(query_spec)) - if not offers: - raise CosmosResourceNotFoundError( - status_code=StatusCodes.NOT_FOUND, - message="Could not find Offer for collection " + self.database_link) - new_offer = offers[0].copy() - new_offer["content"]["offerThroughput"] = throughput - data = self.client_connection.ReplaceOffer(offer_link=offers[0]["_self"], offer=offers[0], **kwargs) - if response_hook: - response_hook(self.client_connection.last_response_headers, data) - return Offer(offer_throughput=data["content"]["offerThroughput"], properties=data) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py index 4a2e6cdcbc50..2954a3578faf 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py @@ -179,6 +179,9 @@ def __enter__(self): def __exit__(self, *args): return self.client_connection.pipeline_client.__exit__(*args) + def close(self): + self.__exit__() + @classmethod def from_connection_string(cls, conn_str, credential=None, consistency_level="Session", **kwargs): # type: (str, Optional[Any], str, Any) -> CosmosClient diff --git a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py index c26bbc42d81c..5a4aacad43e7 100644 --- a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py +++ b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py @@ -6,24 +6,21 @@ from azure.core.tracing.decorator import distributed_trace import asyncio from azure.cosmos import partition_key, cosmos_client -from azure.cosmos.aio.cosmos_client_async import AsyncCosmosClient +from azure.cosmos.aio.cosmos_client import CosmosClient import azure.cosmos.exceptions as exceptions from azure.cosmos.partition_key import PartitionKey from azure.cosmos.database import DatabaseProxy -from azure.cosmos.aio.database_async import DatabaseProxy import config import heroes -def get_azure_data(): - endpoint = "https://simonmoreno-sql.documents.azure.com:443/" - key = 'd3KEBamwtPiQpuuyFSlXEOF98cuhL8oqW3jQygmAfTOPImEZPN2yYWFd4IE5pQNdBF70v8I7LldjXB6fimMbrg==' - return [endpoint, key] +endpoint = '' +key = '' def creation(): # - client = AsyncCosmosClient(get_azure_data()[0], get_azure_data()[1]) + client = CosmosClient(endpoint, key) # Date: Fri, 27 Aug 2021 15:56:40 -0400 Subject: [PATCH 03/56] read database database read works, but ignored exception is returned: Fatal error on SSL transport NoneType has no attribute 'send' (_loop._proactor.send) RuntimeError: Event loop is closed Unclosed connector/ connection --- .../aio/_cosmos_client_connection_async.py | 70 +++++++++++ .../aio/_global_endpoint_manager_async.py | 99 +-------------- .../azure/cosmos/aio/cosmos_client.py | 36 +++++- .../azure-cosmos/azure/cosmos/aio/database.py | 114 ++++++++++++++++++ .../azure-cosmos/samples/simon_testfile.py | 71 +++-------- 5 files changed, 238 insertions(+), 152 deletions(-) create mode 100644 sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py index 645cb0b47e53..04cacae97c13 100644 --- 
a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py @@ -213,3 +213,73 @@ async def _setup(self): if not 'database_account' in self._setup_kwargs: self._setup_kwargs['database_account'] = await self._global_endpoint_manager._GetDatabaseAccount(**self._setup_kwargs) await self._global_endpoint_manager.force_refresh(self._setup_kwargs['database_account']) + + async def ReadDatabase(self, database_link, options=None, **kwargs): + """Reads a database. + + :param str database_link: + The link to the database. + :param dict options: + The request options for the request. + :return: + The Database that was read. + :rtype: dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(database_link) + database_id = base.GetResourceIdOrFullNameFromLink(database_link) + return await self.Read(path, "dbs", database_id, None, options, **kwargs) + + async def Read(self, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin + """Reads a Azure Cosmos resource and returns it. + + :param str path: + :param str typ: + :param str id: + :param dict initial_headers: + :param dict options: + The request options for the request. + + :return: + The upserted Azure Cosmos resource. + :rtype: + dict + + """ + if options is None: + options = {} + + initial_headers = initial_headers or self.default_headers + headers = base.GetHeaders(self, initial_headers, "get", path, id, typ, options) + # Read will use ReadEndpoint since it uses GET operation + request_params = _request_object.RequestObject(typ, documents._OperationType.Read) + result, self.last_response_headers = await self.__Get(path, request_params, headers, **kwargs) + return result + + async def __Get(self, path, request_params, req_headers, **kwargs): + """Azure Cosmos 'GET' async http request. + + :params str url: + :params str path: + :params dict req_headers: + + :return: + Tuple of (result, headers). + :rtype: + tuple of (dict, dict) + + """ + request = self.pipeline_client.get(url=path, headers=req_headers) + return await asynchronous_request.AsynchronousRequest( + client=self, + request_params=request_params, + global_endpoint_manager=self._global_endpoint_manager, + connection_policy=self.connection_policy, + pipeline_client=self.pipeline_client, + request=request, + request_data=None, + **kwargs + ) \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py index 089daf5a225d..9e99276e8b28 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py @@ -24,7 +24,6 @@ """ import asyncio - from six.moves.urllib.parse import urlparse from .. 
import _constants as constants @@ -33,7 +32,6 @@ # pylint: disable=protected-access - class _GlobalEndpointManager(object): """ This internal class implements the logic for endpoint management for @@ -76,99 +74,4 @@ def mark_endpoint_unavailable_for_write(self, endpoint): self.location_cache.mark_endpoint_unavailable_for_write(endpoint) def get_ordered_write_endpoints(self): - return self.location_cache.get_ordered_write_endpoints() - - def get_ordered_read_endpoints(self): - return self.location_cache.get_ordered_read_endpoints() - - def can_use_multiple_write_locations(self, request): - return self.location_cache.can_use_multiple_write_locations_for_request(request) - - async def force_refresh(self, database_account): - self.refresh_needed = True - await self.refresh_endpoint_list(database_account) - - async def refresh_endpoint_list(self, database_account, **kwargs): - async with self.refresh_lock: - # if refresh is not needed or refresh is already taking place, return - if not self.refresh_needed: - return - try: - await self._refresh_endpoint_list_private(database_account, **kwargs) - except Exception as e: - raise e - - async def _refresh_endpoint_list_private(self, database_account=None, **kwargs): - if database_account: - self.location_cache.perform_on_database_account_read(database_account) - self.refresh_needed = False - - if ( - self.location_cache.should_refresh_endpoints() - and self.location_cache.current_time_millis() - self.last_refresh_time > self.refresh_time_interval_in_ms - ): - if not database_account: - database_account = await self._GetDatabaseAccount(**kwargs) - self.location_cache.perform_on_database_account_read(database_account) - self.last_refresh_time = self.location_cache.current_time_millis() - self.refresh_needed = False - - async def _GetDatabaseAccount(self, **kwargs): - """Gets the database account. - - First tries by using the default endpoint, and if that doesn't work, - use the endpoints for the preferred locations in the order they are - specified, to get the database account. - """ - try: - database_account = await self._GetDatabaseAccountStub(self.DefaultEndpoint, **kwargs) - return database_account - # If for any reason(non-globaldb related), we are not able to get the database - # account from the above call to GetDatabaseAccount, we would try to get this - # information from any of the preferred locations that the user might have - # specified (by creating a locational endpoint) and keeping eating the exception - # until we get the database account and return None at the end, if we are not able - # to get that info from any endpoints - except exceptions.CosmosHttpResponseError: - for location_name in self.PreferredLocations: - locational_endpoint = _GlobalEndpointManager.GetLocationalEndpoint(self.DefaultEndpoint, location_name) - try: - database_account = await self._GetDatabaseAccountStub(locational_endpoint, **kwargs) - return database_account - except exceptions.CosmosHttpResponseError: - pass - - return None - - def _GetDatabaseAccountStub(self, endpoint, **kwargs): - """Stub for getting database account from the client. - - This can be used for mocking purposes as well. - """ - return self.Client.GetDatabaseAccount(endpoint, **kwargs) - - @staticmethod - def GetLocationalEndpoint(default_endpoint, location_name): - # For default_endpoint like 'https://contoso.documents.azure.com:443/' parse it to - # generate URL format. 
This default_endpoint should be global endpoint(and cannot - # be a locational endpoint) and we agreed to document that - endpoint_url = urlparse(default_endpoint) - - # hostname attribute in endpoint_url will return 'contoso.documents.azure.com' - if endpoint_url.hostname is not None: - hostname_parts = str(endpoint_url.hostname).lower().split(".") - if hostname_parts is not None: - # global_database_account_name will return 'contoso' - global_database_account_name = hostname_parts[0] - - # Prepare the locational_database_account_name as contoso-EastUS for location_name 'East US' - locational_database_account_name = global_database_account_name + "-" + location_name.replace(" ", "") - - # Replace 'contoso' with 'contoso-EastUS' and return locational_endpoint - # as https://contoso-EastUS.documents.azure.com:443/ - locational_endpoint = default_endpoint.lower().replace( - global_database_account_name, locational_database_account_name, 1 - ) - return locational_endpoint - - return None + return self.location_cache.get_ordered_write_endpoints() \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py index 26166d14bf6c..d2f618b49235 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py @@ -31,7 +31,7 @@ from ._cosmos_client_connection_async import CosmosClientConnection from .._base import build_options from ._retry_utility import ConnectionRetryPolicy -# from .database import DatabaseProxy +from .database import DatabaseProxy from ..documents import ConnectionPolicy, DatabaseAccount from ..exceptions import CosmosResourceNotFoundError @@ -87,8 +87,6 @@ def _build_connection_policy(kwargs): return policy - - class CosmosClient(object): """A client-side logical representation of an Azure Cosmos DB account. @@ -161,3 +159,35 @@ def from_connection_string(cls, conn_str, credential=None, consistency_level="Se consistency_level=consistency_level, **kwargs ) + + @staticmethod + def _get_database_link(database_or_id): + # type: (Union[DatabaseProxy, str, Dict[str, str]]) -> str + if isinstance(database_or_id, six.string_types): + return "dbs/{}".format(database_or_id) + try: + return cast("DatabaseProxy", database_or_id).database_link + except AttributeError: + pass + database_id = cast("Dict[str, str]", database_or_id)["id"] + return "dbs/{}".format(database_id) + + def get_database_client(self, database): + # type: (Union[str, DatabaseProxy, Dict[str, Any]]) -> DatabaseProxy + """Retrieve an existing database with the ID (name) `id`. + + :param database: The ID (name), dict representing the properties or + `DatabaseProxy` instance of the database to read. + :type database: str or dict(str, str) or ~azure.cosmos.DatabaseProxy + :returns: A `DatabaseProxy` instance representing the retrieved database. 
+ :rtype: ~azure.cosmos.DatabaseProxy + """ + if isinstance(database, DatabaseProxy): + id_value = database.id + else: + try: + id_value = database["id"] + except TypeError: + id_value = database + + return DatabaseProxy(self.client_connection, id_value) \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py new file mode 100644 index 000000000000..7f8d20acd98e --- /dev/null +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py @@ -0,0 +1,114 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Interact with databases in the Azure Cosmos DB SQL API service. +""" + +from typing import Any, List, Dict, Union, cast, Iterable, Optional + +import warnings +import six +from azure.core.tracing.decorator_async import distributed_trace_async # type: ignore + +from ._cosmos_client_connection_async import CosmosClientConnection +from .._base import build_options +# from .container import ContainerProxy +from ..offer import Offer +from ..http_constants import StatusCodes +from ..exceptions import CosmosResourceNotFoundError +from ..user import UserProxy +from ..documents import IndexingMode + +__all__ = ("DatabaseProxy",) + +# pylint: disable=protected-access +# pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs + +class DatabaseProxy(object): + """An interface to interact with a specific database. + + This class should not be instantiated directly. Instead use the + :func:`CosmosClient.get_database_client` method. + + A database contains one or more containers, each of which can contain items, + stored procedures, triggers, and user-defined functions. + + A database can also have associated users, each of which is configured with + a set of permissions for accessing certain containers, stored procedures, + triggers, user-defined functions, or items. + + :ivar id: The ID (name) of the database. + + An Azure Cosmos DB SQL API database has the following system-generated + properties. These properties are read-only: + + * `_rid`: The resource ID. + * `_ts`: When the resource was last updated. The value is a timestamp. + * `_self`: The unique addressable URI for the resource. + * `_etag`: The resource etag required for optimistic concurrency control. + * `_colls`: The addressable path of the collections resource. + * `_users`: The addressable path of the users resource. 
+ """ + + def __init__(self, client_connection, id, properties=None): # pylint: disable=redefined-builtin + # type: (CosmosClientConnection, str, Dict[str, Any]) -> None + """ + :param ClientSession client_connection: Client from which this database was retrieved. + :param str id: ID (name) of the database. + """ + self.client_connection = client_connection + self.id = id + self.database_link = u"dbs/{}".format(self.id) + self._properties = properties + + def __repr__(self): + # type () -> str + return "".format(self.database_link)[:1024] + + @distributed_trace_async + async def read(self, populate_query_metrics=None, **kwargs): + # type: (Optional[bool], Any) -> Dict[str, Any] + """Read the database properties. + + :param bool populate_query_metrics: Enable returning query metrics in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword Callable response_hook: A callable invoked with the response metadata. + :rtype: Dict[Str, Any] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given database couldn't be retrieved. + """ + # TODO this helper function should be extracted from CosmosClient + from .cosmos_client import CosmosClient + + database_link = CosmosClient._get_database_link(self) + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if populate_query_metrics is not None: + request_options["populateQueryMetrics"] = populate_query_metrics + + self._properties = await self.client_connection.ReadDatabase( + database_link, options=request_options, **kwargs + ) + + if response_hook: + response_hook(self.client_connection.last_response_headers, self._properties) + + return cast('Dict[str, Any]', self._properties) \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py index 5a4aacad43e7..737dbda424f0 100644 --- a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py +++ b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py @@ -5,22 +5,21 @@ from azure.cosmos import container from azure.core.tracing.decorator import distributed_trace import asyncio -from azure.cosmos import partition_key, cosmos_client -from azure.cosmos.aio.cosmos_client import CosmosClient +from azure.cosmos.aio.cosmos_client import CosmosClient as AsyncClient +from azure.cosmos.cosmos_client import CosmosClient as SyncClient import azure.cosmos.exceptions as exceptions from azure.cosmos.partition_key import PartitionKey -from azure.cosmos.database import DatabaseProxy import config import heroes -endpoint = '' -key = '' +endpoint = 'https://simonmoreno-sql.documents.azure.com:443/' +key = 'VV8mEoVa7aQJJLBBAy6vWOoga0eBS3XtT2CjqkqwfgReMh3ZBwOZC7QCsRyTvmyQFf4TyWFaSHdleDKK3JWKDg==' def creation(): # - client = CosmosClient(endpoint, key) + client = SyncClient(endpoint, key) # Date: Fri, 27 Aug 2021 16:02:16 -0400 Subject: [PATCH 04/56] Update simon_testfile.py --- sdk/cosmos/azure-cosmos/samples/simon_testfile.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py index 737dbda424f0..c69c5ce110fa 100644 --- a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py +++ b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py @@ -13,8 +13,8 @@ import config import heroes -endpoint = 'https://simonmoreno-sql.documents.azure.com:443/' -key = 
'VV8mEoVa7aQJJLBBAy6vWOoga0eBS3XtT2CjqkqwfgReMh3ZBwOZC7QCsRyTvmyQFf4TyWFaSHdleDKK3JWKDg==' +endpoint = '' +key = '' def creation(): From 80540dcd3a972e23906e7a60b6ef9f5fd4c979a5 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Mon, 30 Aug 2021 12:03:45 -0400 Subject: [PATCH 05/56] with coroutine Added methods needed to use async with when initializing client, but logs output "Exception ignored... Runtime Error: Event loop is closed" --- .../azure/cosmos/aio/_asynchronous_request.py | 2 +- .../aio/_cosmos_client_connection_async.py | 50 +++++++++++-- .../aio/_global_endpoint_manager_async.py | 70 ++++++++++++++++++- .../azure-cosmos/samples/simon_testfile.py | 14 +++- 4 files changed, 126 insertions(+), 10 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py index a1afc3f39e93..20fbfd649f1e 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py @@ -59,7 +59,7 @@ async def _Request(global_endpoint_manager, request_params, connection_policy, p # Every request tries to perform a refresh client_timeout = kwargs.get('timeout') start_time = time.time() - global_endpoint_manager.refresh_endpoint_list(None, **kwargs) + await global_endpoint_manager.refresh_endpoint_list(None, **kwargs) if client_timeout is not None: kwargs['timeout'] = client_timeout - (time.time() - start_time) if kwargs['timeout'] <= 0: diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py index 04cacae97c13..79bf24c90529 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py @@ -32,7 +32,6 @@ from urllib3.util.retry import Retry from azure.core.async_paging import AsyncItemPaged from azure.core import AsyncPipelineClient -from azure.core import PipelineClient from azure.core.exceptions import raise_with_traceback # type: ignore from azure.core.pipeline.policies import ( AsyncHTTPPolicy, @@ -53,9 +52,7 @@ from .. import _query_iterable as query_iterable from .. import _runtime_constants as runtime_constants from .. import _request_object -from .. import _synchronized_request as synchronized_request from . import _asynchronous_request as asynchronous_request -from .. import _global_endpoint_manager as global_endpoint_manager from . import _global_endpoint_manager_async as global_endpoint_manager_async from .._routing import routing_map_provider from ._retry_utility import ConnectionRetryPolicy @@ -157,7 +154,7 @@ def __init__( self.session = None # type: ignore self._useMultipleWriteLocations = False - self._global_endpoint_manager = global_endpoint_manager._GlobalEndpointManager(self) + self._global_endpoint_manager = global_endpoint_manager_async._GlobalEndpointManager(self) retry_policy = None if isinstance(self.connection_policy.ConnectionRetryConfiguration, AsyncHTTPPolicy): @@ -214,6 +211,51 @@ async def _setup(self): self._setup_kwargs['database_account'] = await self._global_endpoint_manager._GetDatabaseAccount(**self._setup_kwargs) await self._global_endpoint_manager.force_refresh(self._setup_kwargs['database_account']) + async def GetDatabaseAccount(self, url_connection=None, **kwargs): + """Gets database account info. + + :return: + The Database Account. 
+ :rtype: + documents.DatabaseAccount + + """ + if url_connection is None: + url_connection = self.url_connection + + initial_headers = dict(self.default_headers) + headers = base.GetHeaders(self, initial_headers, "get", "", "", "", {}) # path # id # type + + request_params = _request_object.RequestObject("databaseaccount", documents._OperationType.Read, url_connection) + result, self.last_response_headers = await self.__Get("", request_params, headers, **kwargs) + database_account = documents.DatabaseAccount() + database_account.DatabasesLink = "/dbs/" + database_account.MediaLink = "/media/" + if http_constants.HttpHeaders.MaxMediaStorageUsageInMB in self.last_response_headers: + database_account.MaxMediaStorageUsageInMB = self.last_response_headers[ + http_constants.HttpHeaders.MaxMediaStorageUsageInMB + ] + if http_constants.HttpHeaders.CurrentMediaStorageUsageInMB in self.last_response_headers: + database_account.CurrentMediaStorageUsageInMB = self.last_response_headers[ + http_constants.HttpHeaders.CurrentMediaStorageUsageInMB + ] + database_account.ConsistencyPolicy = result.get(constants._Constants.UserConsistencyPolicy) + + # WritableLocations and ReadableLocations fields will be available only for geo-replicated database accounts + if constants._Constants.WritableLocations in result: + database_account._WritableLocations = result[constants._Constants.WritableLocations] + if constants._Constants.ReadableLocations in result: + database_account._ReadableLocations = result[constants._Constants.ReadableLocations] + if constants._Constants.EnableMultipleWritableLocations in result: + database_account._EnableMultipleWritableLocations = result[ + constants._Constants.EnableMultipleWritableLocations + ] + + self._useMultipleWriteLocations = ( + self.connection_policy.UseMultipleWriteLocations and database_account._EnableMultipleWritableLocations + ) + return database_account + async def ReadDatabase(self, database_link, options=None, **kwargs): """Reads a database. diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py index 9e99276e8b28..03dbdb6c01a3 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py @@ -52,7 +52,7 @@ def __init__(self, client): self.refresh_time_interval_in_ms, ) self.refresh_needed = False - self.refresh_lock = asyncio.RLock() + self.refresh_lock = asyncio.Lock() #Lock vs. 
RLock self.last_refresh_time = 0 def get_refresh_time_interval_in_ms_stub(self): # pylint: disable=no-self-use @@ -74,4 +74,70 @@ def mark_endpoint_unavailable_for_write(self, endpoint): self.location_cache.mark_endpoint_unavailable_for_write(endpoint) def get_ordered_write_endpoints(self): - return self.location_cache.get_ordered_write_endpoints() \ No newline at end of file + return self.location_cache.get_ordered_write_endpoints() + + def can_use_multiple_write_locations(self, request): + return self.location_cache.can_use_multiple_write_locations_for_request(request) + + async def force_refresh(self, database_account): + self.refresh_needed = True + self.refresh_endpoint_list(database_account) + + async def refresh_endpoint_list(self, database_account, **kwargs): + async with self.refresh_lock: + # if refresh is not needed or refresh is already taking place, return + if not self.refresh_needed: + return + try: + await self._refresh_endpoint_list_private(database_account, **kwargs) + except Exception as e: + raise e + + async def _refresh_endpoint_list_private(self, database_account=None, **kwargs): + if database_account: + self.location_cache.perform_on_database_account_read(database_account) + self.refresh_needed = False + + if ( + self.location_cache.should_refresh_endpoints() + and self.location_cache.current_time_millis() - self.last_refresh_time > self.refresh_time_interval_in_ms + ): + if not database_account: + database_account = await self._GetDatabaseAccount(**kwargs) + self.location_cache.perform_on_database_account_read(database_account) + self.last_refresh_time = self.location_cache.current_time_millis() + self.refresh_needed = False + + async def _GetDatabaseAccount(self, **kwargs): + """Gets the database account. + + First tries by using the default endpoint, and if that doesn't work, + use the endpoints for the preferred locations in the order they are + specified, to get the database account. + """ + try: + database_account = await self._GetDatabaseAccountStub(self.DefaultEndpoint, **kwargs) + return database_account + # If for any reason(non-globaldb related), we are not able to get the database + # account from the above call to GetDatabaseAccount, we would try to get this + # information from any of the preferred locations that the user might have + # specified (by creating a locational endpoint) and keeping eating the exception + # until we get the database account and return None at the end, if we are not able + # to get that info from any endpoints + except exceptions.CosmosHttpResponseError: + for location_name in self.PreferredLocations: + locational_endpoint = _GlobalEndpointManager.GetLocationalEndpoint(self.DefaultEndpoint, location_name) + try: + database_account = await self._GetDatabaseAccountStub(locational_endpoint, **kwargs) + return database_account + except exceptions.CosmosHttpResponseError: + pass + + return None + + async def _GetDatabaseAccountStub(self, endpoint, **kwargs): + """Stub for getting database account from the client. + + This can be used for mocking purposes as well. 
+ """ + return await self.Client.GetDatabaseAccount(endpoint, **kwargs) \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py index c69c5ce110fa..5b1c1ff96f45 100644 --- a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py +++ b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py @@ -13,8 +13,8 @@ import config import heroes -endpoint = '' -key = '' +endpoint = 'https://simonmoreno-sql.documents.azure.com:443/' +key = 'Z8HFr11W7ci3wqxs8RvqtzP2IyA9RZw3M11g0LR3jAFe3V1ijJ3BPas6My8pusXx6bPQO89MMxripXvcZDrJkg==' def creation(): @@ -127,5 +127,13 @@ async def async_read_test(): # container = db.get_container_client(id="AsyncContainer") # print(container.read()) +async def with_read_test(): + async with AsyncClient(endpoint, key) as client: + print(client) + db = client.get_database_client(db_name) + if db: print(db) + x = await db.read() + print(x) -asyncio.run(async_read_test()) \ No newline at end of file +# asyncio.run(async_read_test()) +asyncio.run(with_read_test()) \ No newline at end of file From 128543821ba595206c2ea5197958070816e35e72 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Mon, 30 Aug 2021 12:04:14 -0400 Subject: [PATCH 06/56] Update simon_testfile.py --- sdk/cosmos/azure-cosmos/samples/simon_testfile.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py index 5b1c1ff96f45..48e99859d6f3 100644 --- a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py +++ b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py @@ -13,8 +13,8 @@ import config import heroes -endpoint = 'https://simonmoreno-sql.documents.azure.com:443/' -key = 'Z8HFr11W7ci3wqxs8RvqtzP2IyA9RZw3M11g0LR3jAFe3V1ijJ3BPas6My8pusXx6bPQO89MMxripXvcZDrJkg==' +endpoint = '' +key = '' def creation(): From 992b0cd9eec7e4d08d1bd4ff8c40b8510a41f447 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Tue, 31 Aug 2021 12:09:56 -0400 Subject: [PATCH 07/56] small changes --- .../azure/cosmos/aio/_global_endpoint_manager_async.py | 2 +- sdk/cosmos/azure-cosmos/samples/simon_testfile.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py index 03dbdb6c01a3..4d5cd77e91cb 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py @@ -81,7 +81,7 @@ def can_use_multiple_write_locations(self, request): async def force_refresh(self, database_account): self.refresh_needed = True - self.refresh_endpoint_list(database_account) + await self.refresh_endpoint_list(database_account) async def refresh_endpoint_list(self, database_account, **kwargs): async with self.refresh_lock: diff --git a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py index 48e99859d6f3..8a0e0b990be6 100644 --- a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py +++ b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py @@ -124,6 +124,7 @@ async def async_read_test(): if db: print(db) x = await db.read() print(x) + await client.__aexit__() # container = db.get_container_client(id="AsyncContainer") # print(container.read()) From 47cb688653a707384baf456e3b8c6f7e01b83caa Mon Sep 17 00:00:00 2001 From: simorenoh Date: Tue, 31 Aug 2021 13:40:09 -0400 Subject: [PATCH 
08/56] async with returns no exceptions --- sdk/cosmos/azure-cosmos/samples/simon_testfile.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py index 8a0e0b990be6..d9d703496afd 100644 --- a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py +++ b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py @@ -136,5 +136,9 @@ async def with_read_test(): x = await db.read() print(x) -# asyncio.run(async_read_test()) -asyncio.run(with_read_test()) \ No newline at end of file +async def main(): + await with_read_test() + +if __name__ == "__main__": + loop = asyncio.get_event_loop() + loop.run_until_complete(main()) \ No newline at end of file From 0c49739713004e58245aaa745feb6de6258f151f Mon Sep 17 00:00:00 2001 From: simorenoh Date: Wed, 1 Sep 2021 17:49:47 -0400 Subject: [PATCH 09/56] async read container --- .../aio/_cosmos_client_connection_async.py | 21 ++++ .../azure/cosmos/aio/container.py | 114 ++++++++++++++++++ .../azure-cosmos/azure/cosmos/aio/database.py | 32 ++++- .../azure-cosmos/samples/simon_testfile.py | 8 +- 4 files changed, 171 insertions(+), 4 deletions(-) create mode 100644 sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py index 79bf24c90529..b199493bfa04 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py @@ -275,6 +275,27 @@ async def ReadDatabase(self, database_link, options=None, **kwargs): database_id = base.GetResourceIdOrFullNameFromLink(database_link) return await self.Read(path, "dbs", database_id, None, options, **kwargs) + async def ReadContainer(self, collection_link, options=None, **kwargs): + """Reads a collection. + + :param str collection_link: + The link to the document collection. + :param dict options: + The request options for the request. + + :return: + The read Collection. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(collection_link) + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + return await self.Read(path, "colls", collection_id, None, options, **kwargs) + async def Read(self, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin """Reads a Azure Cosmos resource and returns it. diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py new file mode 100644 index 000000000000..6a81de736c1f --- /dev/null +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py @@ -0,0 +1,114 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. 
+ +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Create, read, update and delete items in the Azure Cosmos DB SQL API service. +""" + +from typing import Any, Dict, List, Optional, Union, Iterable, cast # pylint: disable=unused-import + +import six +import asyncio +import time +from azure.core.tracing.decorator_async import distributed_trace_async # type: ignore + +from ._cosmos_client_connection_async import CosmosClientConnection +from .._base import build_options +from ..exceptions import CosmosResourceNotFoundError +from ..http_constants import StatusCodes +from ..offer import Offer +from ..scripts import ScriptsProxy +from ..partition_key import NonePartitionKeyValue + +__all__ = ("ContainerProxy",) + +# pylint: disable=protected-access +# pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs + + +class ContainerProxy(object): + """An interface to interact with a specific DB Container. + + This class should not be instantiated directly. Instead, use the + :func:`DatabaseProxy.get_container_client` method to get an existing + container, or the :func:`Database.create_container` method to create a + new container. + + A container in an Azure Cosmos DB SQL API database is a collection of + documents, each of which is represented as an Item. + + :ivar str id: ID (name) of the container + :ivar str session_token: The session token for the container. + """ + + def __init__(self, client_connection, database_link, id, properties=None): # pylint: disable=redefined-builtin + # type: (CosmosClientConnection, str, str, Dict[str, Any]) -> None + self.client_connection = client_connection + self.id = id + self._properties = properties + self.container_link = u"{}/colls/{}".format(database_link, self.id) + self._is_system_key = None + self._scripts = None # type: Optional[ScriptsProxy] + + def __repr__(self): + # type () -> str + return "".format(self.container_link)[:1024] + + @distributed_trace_async + async def read( + self, + populate_query_metrics=None, # type: Optional[bool] + populate_partition_key_range_statistics=None, # type: Optional[bool] + populate_quota_info=None, # type: Optional[bool] + **kwargs # type: Any + ): + # type: (...) -> Dict[str, Any] + """Read the container properties. + + :param populate_query_metrics: Enable returning query metrics in response headers. + :param populate_partition_key_range_statistics: Enable returning partition key + range statistics in response headers. + :param populate_quota_info: Enable returning collection storage quota information in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword Callable response_hook: A callable invoked with the response metadata. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: Raised if the container couldn't be retrieved. + This includes if the container does not exist. + :returns: Dict representing the retrieved container. 
+ :rtype: dict[str, Any] + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if populate_query_metrics is not None: + request_options["populateQueryMetrics"] = populate_query_metrics + if populate_partition_key_range_statistics is not None: + request_options["populatePartitionKeyRangeStatistics"] = populate_partition_key_range_statistics + if populate_quota_info is not None: + request_options["populateQuotaInfo"] = populate_quota_info + + collection_link = self.container_link + self._properties = await self.client_connection.ReadContainer( + collection_link, options=request_options, **kwargs + ) + + if response_hook: + response_hook(self.client_connection.last_response_headers, self._properties) + + return cast('Dict[str, Any]', self._properties) \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py index 7f8d20acd98e..1c15f64ce77b 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py @@ -30,7 +30,7 @@ from ._cosmos_client_connection_async import CosmosClientConnection from .._base import build_options -# from .container import ContainerProxy +from .container import ContainerProxy from ..offer import Offer from ..http_constants import StatusCodes from ..exceptions import CosmosResourceNotFoundError @@ -111,4 +111,32 @@ async def read(self, populate_query_metrics=None, **kwargs): if response_hook: response_hook(self.client_connection.last_response_headers, self._properties) - return cast('Dict[str, Any]', self._properties) \ No newline at end of file + return cast('Dict[str, Any]', self._properties) + + def get_container_client(self, container): + # type: (Union[str, ContainerProxy, Dict[str, Any]]) -> ContainerProxy + """Get a `ContainerProxy` for a container with specified ID (name). + + :param container: The ID (name) of the container, a :class:`ContainerProxy` instance, + or a dict representing the properties of the container to be retrieved. + :rtype: ~azure.cosmos.ContainerProxy + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/examples.py + :start-after: [START get_container] + :end-before: [END get_container] + :language: python + :dedent: 0 + :caption: Get an existing container, handling a failure if encountered: + :name: get_container + """ + if isinstance(container, ContainerProxy): + id_value = container.id + else: + try: + id_value = container["id"] + except TypeError: + id_value = container + + return ContainerProxy(self.client_connection, self.database_link, id_value) \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py index d9d703496afd..362d434e7cf1 100644 --- a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py +++ b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py @@ -100,13 +100,13 @@ def get_test_item(): return async_item db_name = "AsyncDB" -c_name = "AsyncContainer" +cont_name = "AsyncContainer" def create_test(): client = SyncClient(endpoint, key) db = client.create_database(id=db_name) container = db.create_container( - id=c_name, + id=cont_name, partition_key=PartitionKey(path="/id")) ids = [] for i in range(10): @@ -135,6 +135,10 @@ async def with_read_test(): if db: print(db) x = await db.read() print(x) + cont = db.get_container_client(cont_name) + if cont: print(cont) + x = await cont.read() + print(x) async def main(): await with_read_test() From 47f4af5fd19f286184057edda1df5cd66eee599a Mon Sep 17 00:00:00 2001 From: simorenoh Date: Thu, 2 Sep 2021 13:01:11 -0400 Subject: [PATCH 10/56] async item read --- .../aio/_cosmos_client_connection_async.py | 29 ++++++- .../azure/cosmos/aio/container.py | 78 ++++++++++++++++++- .../azure-cosmos/samples/simon_testfile.py | 7 +- 3 files changed, 110 insertions(+), 4 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py index b199493bfa04..9230f63cb216 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py @@ -296,6 +296,27 @@ async def ReadContainer(self, collection_link, options=None, **kwargs): collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) return await self.Read(path, "colls", collection_id, None, options, **kwargs) + async def ReadItem(self, document_link, options=None, **kwargs): + """Reads a document. + + :param str document_link: + The link to the document. + :param dict options: + The request options for the request. + + :return: + The read Document. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(document_link) + document_id = base.GetResourceIdOrFullNameFromLink(document_link) + return await self.Read(path, "docs", document_id, None, options, **kwargs) + async def Read(self, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin """Reads a Azure Cosmos resource and returns it. 
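
A minimal usage sketch (outside the patch, under stated assumptions) of how the read path added here composes: ContainerProxy.read_item builds the document link and delegates to CosmosClientConnection.ReadItem, which issues the async GET through the pipeline. Endpoint, key, and the database/container/item ids are placeholders for an existing account and data, not values from this PR:

    import asyncio
    from azure.cosmos.aio.cosmos_client import CosmosClient as AsyncClient

    ENDPOINT = "<account-endpoint>"  # placeholder, not a real account
    KEY = "<account-key>"            # placeholder, not a real key

    async def read_one_item():
        async with AsyncClient(ENDPOINT, KEY) as client:
            db = client.get_database_client("AsyncDB")            # assumed existing ids
            container = db.get_container_client("AsyncContainer")
            # read_item() resolves the document link and awaits ReadItem on the connection
            item = await container.read_item(item="item-id", partition_key="item-id")
            print(item["id"])

    asyncio.run(read_one_item())
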
@@ -345,4 +366,10 @@ async def __Get(self, path, request_params, req_headers, **kwargs): request=request, request_data=None, **kwargs - ) \ No newline at end of file + ) + + @staticmethod + def _return_undefined_or_empty_partition_key(is_system_key): + if is_system_key: + return _Empty + return _Undefined \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py index 6a81de736c1f..e75e8cd135c7 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py @@ -71,6 +71,33 @@ def __repr__(self): # type () -> str return "".format(self.container_link)[:1024] + async def _get_properties(self): + # type: () -> Dict[str, Any] + if self._properties is None: + self._properties = await self.read() + return self._properties + + @property + async def is_system_key(self): + # type: () -> bool + if self._is_system_key is None: + properties = await self._get_properties() + self._is_system_key = ( + properties["partitionKey"]["systemKey"] if "systemKey" in properties["partitionKey"] else False + ) + return cast('bool', self._is_system_key) + + def _get_document_link(self, item_or_link): + # type: (Union[Dict[str, Any], str]) -> str + if isinstance(item_or_link, six.string_types): + return u"{}/docs/{}".format(self.container_link, item_or_link) + return item_or_link["_self"] + + def _set_partition_key(self, partition_key): + if partition_key == NonePartitionKeyValue: + return CosmosClientConnection._return_undefined_or_empty_partition_key(self.is_system_key) #might have to await here + return partition_key + @distributed_trace_async async def read( self, @@ -111,4 +138,53 @@ async def read( if response_hook: response_hook(self.client_connection.last_response_headers, self._properties) - return cast('Dict[str, Any]', self._properties) \ No newline at end of file + return cast('Dict[str, Any]', self._properties) + + @distributed_trace_async + async def read_item( + self, + item, # type: Union[str, Dict[str, Any]] + partition_key, # type: Any + populate_query_metrics=None, # type: Optional[bool] + post_trigger_include=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Dict[str, str] + """Get the item identified by `item`. + + :param item: The ID (name) or dict representing item to retrieve. + :param partition_key: Partition key for the item to retrieve. + :param populate_query_metrics: Enable returning query metrics in response headers. + :param post_trigger_include: trigger id to be used as post operation trigger. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: Dict representing the item to be retrieved. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The given item couldn't be retrieved. + :rtype: dict[str, Any] + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/examples.py + :start-after: [START update_item] + :end-before: [END update_item] + :language: python + :dedent: 0 + :caption: Get an item from the database and update one of its properties: + :name: update_item + """ + doc_link = self._get_document_link(item) + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + + if partition_key is not None: + request_options["partitionKey"] = self._set_partition_key(partition_key) + if populate_query_metrics is not None: + request_options["populateQueryMetrics"] = populate_query_metrics + if post_trigger_include is not None: + request_options["postTriggerInclude"] = post_trigger_include + + result = await self.client_connection.ReadItem(document_link=doc_link, options=request_options, **kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py index 362d434e7cf1..f2eaa18b69e8 100644 --- a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py +++ b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py @@ -1,10 +1,10 @@ import sys +from time import time sys.path.append(r"C:\Users\simonmoreno\Repos\azure-sdk-for-python\sdk\cosmos\azure-cosmos") -from azure.cosmos import container -from azure.core.tracing.decorator import distributed_trace import asyncio +import time from azure.cosmos.aio.cosmos_client import CosmosClient as AsyncClient from azure.cosmos.cosmos_client import CosmosClient as SyncClient import azure.cosmos.exceptions as exceptions @@ -101,6 +101,7 @@ def get_test_item(): db_name = "AsyncDB" cont_name = "AsyncContainer" +item_name = "Async_011deab0-bb11-47fd-8b66-b5b8d81d8c73" def create_test(): client = SyncClient(endpoint, key) @@ -139,6 +140,8 @@ async def with_read_test(): if cont: print(cont) x = await cont.read() print(x) + x = await cont.read_item(item=item_name, partition_key=item_name) + print(x) async def main(): await with_read_test() From c97c94643d72977c1a98afab12ecda6864245c66 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Fri, 3 Sep 2021 12:14:21 -0400 Subject: [PATCH 11/56] cleaning up --- sdk/cosmos/azure-cosmos/azure/cosmos/_base.py | 2 +- .../azure-cosmos/azure/cosmos/container.py | 58 -------- sdk/cosmos/azure-cosmos/samples/heroes.py | 97 -------------- .../azure-cosmos/samples/simon_testfile.py | 125 ++++++------------ 4 files changed, 38 insertions(+), 244 deletions(-) delete mode 100644 sdk/cosmos/azure-cosmos/samples/heroes.py diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py index 46b7d46918d5..0c06891a549c 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py @@ -175,7 +175,7 @@ def GetHeaders( # pylint: disable=too-many-statements,too-many-branches if options.get("consistencyLevel"): consistency_level = options["consistencyLevel"] headers[http_constants.HttpHeaders.ConsistencyLevel] = consistency_level - elif default_client_consistency_level is not None: # Why not just check for `default_client_consistency_level` + elif default_client_consistency_level is not None: consistency_level = default_client_consistency_level headers[http_constants.HttpHeaders.ConsistencyLevel] = consistency_level diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py index 1e8b9f51d483..7286b2b8d0b4 100644 --- 
a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py @@ -25,8 +25,6 @@ from typing import Any, Dict, List, Optional, Union, Iterable, cast # pylint: disable=unused-import import six -import asyncio -import time from azure.core.tracing.decorator import distributed_trace # type: ignore from ._cosmos_client_connection import CosmosClientConnection @@ -498,7 +496,6 @@ def create_item( :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: Item with the given ID already exists. :rtype: dict[str, Any] """ - start = time.time() request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) @@ -517,61 +514,6 @@ def create_item( ) if response_hook: response_hook(self.client_connection.last_response_headers, result) - print(f"Create item took {(time.time() - start) * 1000} ms") - return result - - @distributed_trace - async def create_item_aio( - self, - body, # type: Dict[str, Any] - populate_query_metrics=None, # type: Optional[bool] - pre_trigger_include=None, # type: Optional[str] - post_trigger_include=None, # type: Optional[str] - indexing_directive=None, # type: Optional[Any] - **kwargs # type: Any - ): - # type: (...) -> Dict[str, str] - """Create an item in the container. - - To update or replace an existing item, use the - :func:`ContainerProxy.upsert_item` method. - - :param body: A dict-like object representing the item to create. - :param populate_query_metrics: Enable returning query metrics in response headers. - :param pre_trigger_include: trigger id to be used as pre operation trigger. - :param post_trigger_include: trigger id to be used as post operation trigger. - :param indexing_directive: Indicate whether the document should be omitted from indexing. - :keyword bool enable_automatic_id_generation: Enable automatic id generation if no id present. - :keyword str session_token: Token for use with Session consistency. - :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. - :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource - has changed, and act according to the condition specified by the `match_condition` parameter. - :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. - :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: A dict representing the new item. - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: Item with the given ID already exists. - :rtype: dict[str, Any] - """ - start = time.time() - request_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - - request_options["disableAutomaticIdGeneration"] = not kwargs.pop('enable_automatic_id_generation', False) - if populate_query_metrics: - request_options["populateQueryMetrics"] = populate_query_metrics - if pre_trigger_include is not None: - request_options["preTriggerInclude"] = pre_trigger_include - if post_trigger_include is not None: - request_options["postTriggerInclude"] = post_trigger_include - if indexing_directive is not None: - request_options["indexingDirective"] = indexing_directive - - result = await self.client_connection.CreateItemAIO( - database_or_container_link=self.container_link, document=body, options=request_options, **kwargs - ) - if response_hook: - response_hook(self.client_connection.last_response_headers, result) #what is this doing? 
can't find function - print(f"Create item took {(time.time() - start) * 1000} ms") return result @distributed_trace diff --git a/sdk/cosmos/azure-cosmos/samples/heroes.py b/sdk/cosmos/azure-cosmos/samples/heroes.py deleted file mode 100644 index 65b4f5ece427..000000000000 --- a/sdk/cosmos/azure-cosmos/samples/heroes.py +++ /dev/null @@ -1,97 +0,0 @@ -import uuid - -def get_generic_hero(): - hero_item = { - 'id': 'Generic_Hero_' + str(uuid.uuid4()), - 'lastName': 'Smith', - 'parents': None, - 'children': None, - 'address': { - 'state': 'FL', - 'city': 'Miami' - }, - 'saved': ['block'], - 'professional': False, - 'company': None - } - return hero_item - -def get_batman(): - hero_item = { - 'id': 'Batman', - 'lastName': 'Wayne', - 'parents': None, - 'children': None, - 'address': { - 'state': 'WA', - 'city': 'Gotham' - }, - 'saved': ['state', 'city'], - 'professional': True, - 'company': 'DC' - } - return hero_item - -def get_flash(): - hero_item = { - 'id': 'Flash', - 'lastName': 'Allen', - 'parents': None, - 'children': None, - 'address': { - 'state': 'NY', - 'city': 'New York' - }, - 'saved': ['world','country'], - 'professional': True, - 'company': 'DC' - } - return hero_item - -def get_superman(): - hero_item = { - 'id': 'Superman', - 'lastName': 'Kent', - 'parents': None, - 'children': None, - 'address': { - 'state': 'WA', - 'city': 'Metropolis' - }, - 'saved': ['universe','world','country', 'state'], - 'professional': True, - 'company': 'DC' - } - return hero_item - -def get_spider(): - hero_item = { - 'id': 'Spiderman', - 'lastName': 'Parker', - 'parents': None, - 'children': None, - 'address': { - 'state': 'NY', - 'city': 'New York' - }, - 'saved': ['galaxy','world','country'], - 'professional': True, - 'company': 'Marvel' - } - return hero_item - -def get_iron(): - hero_item = { - 'id': 'Ironman', - 'lastName': 'Stark', - 'parents': None, - 'children': None, - 'address': { - 'state': 'NY', - 'city': 'New York' - }, - 'saved': ['galaxy','world','country'], - 'professional': True, - 'company': 'Marvel' - } - return hero_item \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py index f2eaa18b69e8..c7ff80e57f15 100644 --- a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py +++ b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py @@ -10,80 +10,8 @@ import azure.cosmos.exceptions as exceptions from azure.cosmos.partition_key import PartitionKey -import config -import heroes - -endpoint = '' -key = '' - -def creation(): - - # - client = SyncClient(endpoint, key) - # - database_name = 'MockHeroesDatabase' - database = client.create_database_if_not_exists(id=database_name) - # - - container_name = 'mockHeroesContainer' - container = database.create_container_if_not_exists( - id=container_name, - partition_key=PartitionKey(path="/lastName"), - offer_throughput=400 - ) - - real_heroes = [heroes.get_superman(), heroes.get_batman(), heroes.get_flash(), heroes.get_spider(), heroes.get_iron()] - generics = [heroes.get_generic_hero(), heroes.get_generic_hero(), heroes.get_generic_hero()] - - for hero in real_heroes: - container.create_item(body=hero) - - for generic in generics: - container.create_item(body=generic) - - for hero in real_heroes: - response = container.read_item(item=hero['id'], partition_key=hero['lastName']) - request_charge = container.client_connection.last_response_headers['x-ms-request-charge'] #! 
- if hero['id'] == 'Superman': print(container.client_connection.last_response_headers) - print('Read item with id {0}. Operation consumed {1} request units'.format(response['id'], (request_charge))) - - query = "SELECT * FROM c WHERE c.lastName IN ('Kent', 'Parker')" - - items = list(container.query_items( - query=query, - enable_cross_partition_query=True #! - )) - - request_charge = container.client_connection.last_response_headers['x-ms-request-charge'] #! - print('Query returned {0} items. Operation consumed {1} request units'.format(len(items), request_charge)) - -def clean_heroes(): - client = SyncClient(endpoint, key) - database_name = 'MockHeroesDatabase' - database = client.get_database_client(database_name) - container_name = 'mockHeroesContainer' - container = database.get_container_client(container_name) - real_heroes = [heroes.get_superman(), heroes.get_batman(), heroes.get_flash(), heroes.get_spider(), heroes.get_iron()] - for h in real_heroes: - response = container.delete_item(h['id'], partition_key=h['lastName']) - print(response) - -def destroy(): - client = SyncClient(endpoint, key) - database_name = 'MockHeroesDatabase' - response = client.delete_database(database_name) - print(f"Database with name {database_name} has been deleted.") - print(response) - - # for generic in generics: - # container.create_item_aio(body=generic) - -# asyncio.run(createaio()) - -# creation() +endpoint = 'https://simonmoreno-sql.documents.azure.com:443/' +key = 'ix7V0n09yDyiUarBkQnDRAqBwkxXMM6iGq7FlDHmHRlHZPUDRnBu55Vx2gwzd2Mkh6Qyrc8VnJWR6djgnkl8cw==' import uuid @@ -99,37 +27,31 @@ def get_test_item(): } return async_item -db_name = "AsyncDB" -cont_name = "AsyncContainer" -item_name = "Async_011deab0-bb11-47fd-8b66-b5b8d81d8c73" - -def create_test(): +def create_test(db_name, cont_name, num): client = SyncClient(endpoint, key) db = client.create_database(id=db_name) container = db.create_container( id=cont_name, partition_key=PartitionKey(path="/id")) ids = [] - for i in range(10): + for i in range(num): body = get_test_item() - print(body.get("id")) ids.append(body.get("id")) container.create_item(body=body) + print("Created {} items in {} DB successfully".format(num, db_name)) return ids -async def async_read_test(): - # ids = create_test() +async def async_read_test(db_name, cont_name, num): + ids = create_test(db_name, cont_name, num) client = AsyncClient(endpoint, key) if client: print(client) db = client.get_database_client(db_name) if db: print(db) x = await db.read() print(x) - await client.__aexit__() - # container = db.get_container_client(id="AsyncContainer") - # print(container.read()) + await client.close() -async def with_read_test(): +async def with_read_test(db_name, cont_name, item_name): async with AsyncClient(endpoint, key) as client: print(client) db = client.get_database_client(db_name) @@ -143,8 +65,35 @@ async def with_read_test(): x = await cont.read_item(item=item_name, partition_key=item_name) print(x) +def timed_sync(db2, cont2, num, ids): + client = SyncClient(endpoint, key) + db = client.get_database_client(db2) + cont = db.get_container_client(cont2) + start = time.time() + for id in ids: + x = cont.read_item(item=id, partition_key=id) + if not x: + print("Error retrieving item {}".format(id)) + print("Sync client retrieved {} items in {} seconds".format(num, time.time() - start)) + +async def timed_async(db1, cont1, num, ids): + async with AsyncClient(endpoint, key) as client: + db = client.get_database_client(db1) + cont = db.get_container_client(cont1) + 
start = time.time() + for id in ids: + x = await cont.read_item(item=id, partition_key=id) + if not x: + print("Error retrieving item {}".format(id)) + print("Async client retrieved {} items in {} seconds".format(num, time.time() - start)) + async def main(): - await with_read_test() + db = "db01" + cont = "c01" + num = 100000 + ids = create_test(db, cont, num) + timed_sync(db,cont,num,ids) + await timed_async(db,cont,num,ids) if __name__ == "__main__": loop = asyncio.get_event_loop() From fcd95db1f422b7ff8b28bae84915c403c5a1143b Mon Sep 17 00:00:00 2001 From: simorenoh Date: Mon, 13 Sep 2021 13:50:37 -0400 Subject: [PATCH 12/56] create item/ database methods --- .../aio/_cosmos_client_connection_async.py | 307 +++++++++++++++++- .../azure/cosmos/aio/container.py | 52 +++ .../azure/cosmos/aio/cosmos_client.py | 98 +++++- .../azure-cosmos/azure/cosmos/aio/database.py | 158 +++++++++ .../azure-cosmos/samples/simon_testfile.py | 92 ++++-- 5 files changed, 673 insertions(+), 34 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py index 9230f63cb216..9266495e51cb 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py @@ -256,6 +256,145 @@ async def GetDatabaseAccount(self, url_connection=None, **kwargs): ) return database_account + async def CreateDatabase(self, database, options=None, **kwargs): + """Creates a database. + + :param dict database: + The Azure Cosmos database to create. + :param dict options: + The request options for the request. + + :return: + The Database that was created. + :rtype: dict + + """ + if options is None: + options = {} + + CosmosClientConnection.__ValidateResource(database) + path = "/dbs" + return await self.Create(database, path, "dbs", None, None, options, **kwargs) + + async def CreateContainer(self, database_link, collection, options=None, **kwargs): + """Creates a collection in a database. + + :param str database_link: + The link to the database. + :param dict collection: + The Azure Cosmos collection to create. + :param dict options: + The request options for the request. + + :return: The Collection that was created. + :rtype: dict + + """ + if options is None: + options = {} + + CosmosClientConnection.__ValidateResource(collection) + path = base.GetPathFromLink(database_link, "colls") + database_id = base.GetResourceIdOrFullNameFromLink(database_link) + return await self.Create(collection, path, "colls", database_id, None, options, **kwargs) + + async def CreateItem(self, database_or_container_link, document, options=None, **kwargs): + """Creates a document in a collection. + + :param str database_or_container_link: + The link to the database when using partitioning, otherwise link to the document collection. + :param dict document: + The Azure Cosmos document to create. + :param dict options: + The request options for the request. + :param bool options['disableAutomaticIdGeneration']: + Disables the automatic id generation. If id is missing in the body and this + option is true, an error will be returned. + + :return: + The created Document. + :rtype: + dict + + """ + # Python's default arguments are evaluated once when the function is defined, + # not each time the function is called (like it is in say, Ruby). 
This means + # that if you use a mutable default argument and mutate it, you will and have + # mutated that object for all future calls to the function as well. So, using + # a non-mutable default in this case(None) and assigning an empty dict(mutable) + # inside the method For more details on this gotcha, please refer + # http://docs.python-guide.org/en/latest/writing/gotchas/ + if options is None: + options = {} + + # We check the link to be document collection link since it can be database + # link in case of client side partitioning + collection_id, document, path = self._GetContainerIdWithPathForItem( + database_or_container_link, document, options + ) + + if base.IsItemContainerLink(database_or_container_link): + options = await self._AddPartitionKey(database_or_container_link, document, options) + + return await self.Create(document, path, "docs", collection_id, None, options, **kwargs) + + async def Create(self, body, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin + """Creates a Azure Cosmos resource and returns it. + + :param dict body: + :param str path: + :param str typ: + :param str id: + :param dict initial_headers: + :param dict options: + The request options for the request. + + :return: + The created Azure Cosmos resource. + :rtype: + dict + + """ + if options is None: + options = {} + + initial_headers = initial_headers or self.default_headers + headers = base.GetHeaders(self, initial_headers, "post", path, id, typ, options) + # Create will use WriteEndpoint since it uses POST operation + + request_params = _request_object.RequestObject(typ, documents._OperationType.Create) + result, self.last_response_headers = await self.__Post(path, request_params, body, headers, **kwargs) + + # update session for write request + self._UpdateSessionIfRequired(headers, result, self.last_response_headers) + return result + + async def __Post(self, path, request_params, body, req_headers, **kwargs): + """Azure Cosmos 'POST' http request. + + :params str url: + :params str path: + :params (str, unicode, dict) body: + :params dict req_headers: + + :return: + Tuple of (result, headers). + :rtype: + tuple of (dict, dict) + + """ + request = self.pipeline_client.post(url=path, headers=req_headers) + return await asynchronous_request.AsynchronousRequest( + client=self, + request_params=request_params, + global_endpoint_manager=self._global_endpoint_manager, + connection_policy=self.connection_policy, + pipeline_client=self.pipeline_client, + request=request, + request_data=body, + **kwargs + ) + async def ReadDatabase(self, database_link, options=None, **kwargs): """Reads a database. @@ -368,8 +507,174 @@ async def __Get(self, path, request_params, req_headers, **kwargs): **kwargs ) + def _UpdateSessionIfRequired(self, request_headers, response_result, response_headers): + """ + Updates session if necessary. + + :param dict response_result: + :param dict response_headers: + :param dict response_headers + + :return: + None, but updates the client session if necessary. 
+ + """ + + # if this request was made with consistency level as session, then update the session + if response_result is None or response_headers is None: + return + + is_session_consistency = False + if http_constants.HttpHeaders.ConsistencyLevel in request_headers: + if documents.ConsistencyLevel.Session == request_headers[http_constants.HttpHeaders.ConsistencyLevel]: + is_session_consistency = True + + if is_session_consistency: + # update session + self.session.update_session(response_result, response_headers) + + PartitionResolverErrorMessage = ( + "Couldn't find any partition resolvers for the database link provided. " + + "Ensure that the link you used when registering the partition resolvers " + + "matches the link provided or you need to register both types of database " + + "link(self link as well as ID based link)." + ) + + # Gets the collection id and path for the document + def _GetContainerIdWithPathForItem(self, database_or_container_link, document, options): + + if not database_or_container_link: + raise ValueError("database_or_container_link is None or empty.") + + if document is None: + raise ValueError("document is None.") + + CosmosClientConnection.__ValidateResource(document) + document = document.copy() + if not document.get("id") and not options.get("disableAutomaticIdGeneration"): + document["id"] = base.GenerateGuidId() + + collection_link = database_or_container_link + + if base.IsDatabaseLink(database_or_container_link): + partition_resolver = self.GetPartitionResolver(database_or_container_link) + + if partition_resolver is not None: + collection_link = partition_resolver.ResolveForCreate(document) + else: + raise ValueError(CosmosClientConnection.PartitionResolverErrorMessage) + + path = base.GetPathFromLink(collection_link, "docs") + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + return collection_id, document, path + + def RegisterPartitionResolver(self, database_link, partition_resolver): + """Registers the partition resolver associated with the database link + + :param str database_link: + Database Self Link or ID based link. + :param object partition_resolver: + An instance of PartitionResolver. + + """ + if not database_link: + raise ValueError("database_link is None or empty.") + + if partition_resolver is None: + raise ValueError("partition_resolver is None.") + + self.partition_resolvers = {base.TrimBeginningAndEndingSlashes(database_link): partition_resolver} + + def GetPartitionResolver(self, database_link): + """Gets the partition resolver associated with the database link + + :param str database_link: + Database self link or ID based link. + + :return: + An instance of PartitionResolver. 
+ :rtype: object + + """ + if not database_link: + raise ValueError("database_link is None or empty.") + + return self.partition_resolvers.get(base.TrimBeginningAndEndingSlashes(database_link)) + + # Adds the partition key to options + async def _AddPartitionKey(self, collection_link, document, options): + collection_link = base.TrimBeginningAndEndingSlashes(collection_link) + + # TODO: Refresh the cache if partition is extracted automatically and we get a 400.1001 + + # If the document collection link is present in the cache, then use the cached partitionkey definition + if collection_link in self.partition_key_definition_cache: + partitionKeyDefinition = self.partition_key_definition_cache.get(collection_link) + # Else read the collection from backend and add it to the cache + else: + collection = await self.ReadContainer(collection_link) + partitionKeyDefinition = collection.get("partitionKey") + self.partition_key_definition_cache[collection_link] = partitionKeyDefinition + + # If the collection doesn't have a partition key definition, skip it as it's a legacy collection + if partitionKeyDefinition: + # If the user has passed in the partitionKey in options use that elase extract it from the document + if "partitionKey" not in options: + partitionKeyValue = self._ExtractPartitionKey(partitionKeyDefinition, document) + options["partitionKey"] = partitionKeyValue + + return options + + # Extracts the partition key from the document using the partitionKey definition + def _ExtractPartitionKey(self, partitionKeyDefinition, document): + + # Parses the paths into a list of token each representing a property + partition_key_parts = base.ParsePaths(partitionKeyDefinition.get("paths")) + # Check if the partitionKey is system generated or not + is_system_key = partitionKeyDefinition["systemKey"] if "systemKey" in partitionKeyDefinition else False + + # Navigates the document to retrieve the partitionKey specified in the paths + return self._retrieve_partition_key(partition_key_parts, document, is_system_key) + + # Navigates the document to retrieve the partitionKey specified in the partition key parts + def _retrieve_partition_key(self, partition_key_parts, document, is_system_key): + expected_matchCount = len(partition_key_parts) + matchCount = 0 + partitionKey = document + + for part in partition_key_parts: + # At any point if we don't find the value of a sub-property in the document, we return as Undefined + if part not in partitionKey: + return self._return_undefined_or_empty_partition_key(is_system_key) + + partitionKey = partitionKey.get(part) + matchCount += 1 + # Once we reach the "leaf" value(not a dict), we break from loop + if not isinstance(partitionKey, dict): + break + + # Match the count of hops we did to get the partitionKey with the length of + # partition key parts and validate that it's not a dict at that level + if (matchCount != expected_matchCount) or isinstance(partitionKey, dict): + return self._return_undefined_or_empty_partition_key(is_system_key) + + return partitionKey + @staticmethod def _return_undefined_or_empty_partition_key(is_system_key): if is_system_key: return _Empty - return _Undefined \ No newline at end of file + return _Undefined + + @staticmethod + def __ValidateResource(resource): + id_ = resource.get("id") + if id_: + try: + if id_.find("/") != -1 or id_.find("\\") != -1 or id_.find("?") != -1 or id_.find("#") != -1: + raise ValueError("Id contains illegal chars.") + + if id_[-1] == " ": + raise ValueError("Id ends with a space.") + except 
AttributeError: + raise_with_traceback(TypeError, message="Id type must be a string.") \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py index e75e8cd135c7..9cd4f64e2f03 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py @@ -98,6 +98,58 @@ def _set_partition_key(self, partition_key): return CosmosClientConnection._return_undefined_or_empty_partition_key(self.is_system_key) #might have to await here return partition_key + @distributed_trace_async + async def create_item( + self, + body, # type: Dict[str, Any] + populate_query_metrics=None, # type: Optional[bool] + pre_trigger_include=None, # type: Optional[str] + post_trigger_include=None, # type: Optional[str] + indexing_directive=None, # type: Optional[Any] + **kwargs # type: Any + ): + # type: (...) -> Dict[str, str] + """Create an item in the container. + + To update or replace an existing item, use the + :func:`ContainerProxy.upsert_item` method. + + :param body: A dict-like object representing the item to create. + :param populate_query_metrics: Enable returning query metrics in response headers. + :param pre_trigger_include: trigger id to be used as pre operation trigger. + :param post_trigger_include: trigger id to be used as post operation trigger. + :param indexing_directive: Indicate whether the document should be omitted from indexing. + :keyword bool enable_automatic_id_generation: Enable automatic id generation if no id present. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: A dict representing the new item. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: Item with the given ID already exists. 
+ :rtype: dict[str, Any] + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + + request_options["disableAutomaticIdGeneration"] = not kwargs.pop('enable_automatic_id_generation', False) + if populate_query_metrics: + request_options["populateQueryMetrics"] = populate_query_metrics + if pre_trigger_include is not None: + request_options["preTriggerInclude"] = pre_trigger_include + if post_trigger_include is not None: + request_options["postTriggerInclude"] = post_trigger_include + if indexing_directive is not None: + request_options["indexingDirective"] = indexing_directive + + result = await self.client_connection.CreateItem( + database_or_container_link=self.container_link, document=body, options=request_options, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + @distributed_trace_async async def read( self, diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py index d2f618b49235..b80055f15826 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py @@ -22,7 +22,8 @@ """Create, read, and delete databases in the Azure Cosmos DB SQL API service. """ -from typing import Any, Dict, Optional, Union, cast, Iterable, List # pylint: disable=unused-import +from typing import Any, Dict, Optional, Union, cast, Iterable, List +from azure.core.tracing.decorator_async import distributed_trace_async # pylint: disable=unused-import import six from azure.core.tracing.decorator import distributed_trace # type: ignore @@ -172,6 +173,101 @@ def _get_database_link(database_or_id): database_id = cast("Dict[str, str]", database_or_id)["id"] return "dbs/{}".format(database_id) + @distributed_trace_async + async def create_database( # pylint: disable=redefined-builtin + self, + id, # type: str + populate_query_metrics=None, # type: Optional[bool] + offer_throughput=None, # type: Optional[int] + **kwargs # type: Any + ): + # type: (...) -> DatabaseProxy + """ + Create a new database with the given ID (name). + + :param id: ID (name) of the database to create. + :param bool populate_query_metrics: Enable returning query metrics in response headers. + :param int offer_throughput: The provisioned throughput for this offer. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: A DatabaseProxy instance representing the new database. + :rtype: ~azure.cosmos.DatabaseProxy + :raises ~azure.cosmos.exceptions.CosmosResourceExistsError: Database with the given ID already exists. + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/examples.py + :start-after: [START create_database] + :end-before: [END create_database] + :language: python + :dedent: 0 + :caption: Create a database in the Cosmos DB account: + :name: create_database + """ + + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if populate_query_metrics is not None: + request_options["populateQueryMetrics"] = populate_query_metrics + if offer_throughput is not None: + request_options["offerThroughput"] = offer_throughput + + result = await self.client_connection.CreateDatabase(database=dict(id=id), options=request_options, **kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers) + return DatabaseProxy(self.client_connection, id=result["id"], properties=result) + + @distributed_trace_async + async def create_database_if_not_exists( # pylint: disable=redefined-builtin + self, + id, # type: str + populate_query_metrics=None, # type: Optional[bool] + offer_throughput=None, # type: Optional[int] + **kwargs # type: Any + ): + # type: (...) -> DatabaseProxy + """ + Create the database if it does not exist already. + + If the database already exists, the existing settings are returned. + + ..note:: + This function does not check or update existing database settings or + offer throughput if they differ from what is passed in. + + :param id: ID (name) of the database to read or create. + :param bool populate_query_metrics: Enable returning query metrics in response headers. + :param int offer_throughput: The provisioned throughput for this offer. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: A DatabaseProxy instance representing the database. + :rtype: ~azure.cosmos.DatabaseProxy + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The database read or creation failed. + """ + try: + database_proxy = self.get_database_client(id) + await database_proxy.read( + populate_query_metrics=populate_query_metrics, + **kwargs + ) + print("Read DB with success") + return database_proxy + except CosmosResourceNotFoundError: + return await self.create_database( + id, + populate_query_metrics=populate_query_metrics, + offer_throughput=offer_throughput, + **kwargs + ) + def get_database_client(self, database): # type: (Union[str, DatabaseProxy, Dict[str, Any]]) -> DatabaseProxy """Retrieve an existing database with the ID (name) `id`. 
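
A rough end-to-end sketch (outside the patch) of how the client-level create helpers above are meant to compose with the database-level ones in the next file diff; endpoint, key, and ids are placeholders, and this assumes an account where the databases/containers may or may not already exist:

    import asyncio
    import uuid
    from azure.cosmos.aio.cosmos_client import CosmosClient as AsyncClient
    from azure.cosmos.partition_key import PartitionKey

    ENDPOINT = "<account-endpoint>"  # placeholder
    KEY = "<account-key>"            # placeholder

    async def create_flow():
        async with AsyncClient(ENDPOINT, KEY) as client:
            # create_database_if_not_exists() tries DatabaseProxy.read() first and
            # only falls back to CreateDatabase on CosmosResourceNotFoundError
            db = await client.create_database_if_not_exists(id="db01")
            container = await db.create_container_if_not_exists(
                id="c01", partition_key=PartitionKey(path="/id"))
            # create_item() lets the connection resolve (and cache) the container's
            # partition key definition before issuing the POST
            await container.create_item(body={"id": str(uuid.uuid4())})

    asyncio.run(create_flow())
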
diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py index 1c15f64ce77b..da5c58defd35 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py @@ -83,6 +83,164 @@ def __repr__(self): # type () -> str return "".format(self.database_link)[:1024] + @distributed_trace_async + async def create_container( + self, + id, # type: str # pylint: disable=redefined-builtin + partition_key, # type: Any + indexing_policy=None, # type: Optional[Dict[str, Any]] + default_ttl=None, # type: Optional[int] + populate_query_metrics=None, # type: Optional[bool] + offer_throughput=None, # type: Optional[int] + unique_key_policy=None, # type: Optional[Dict[str, Any]] + conflict_resolution_policy=None, # type: Optional[Dict[str, Any]] + **kwargs # type: Any + ): + # type: (...) -> ContainerProxy + """Create a new container with the given ID (name). + + If a container with the given ID already exists, a CosmosResourceExistsError is raised. + + :param id: ID (name) of container to create. + :param partition_key: The partition key to use for the container. + :param indexing_policy: The indexing policy to apply to the container. + :param default_ttl: Default time to live (TTL) for items in the container. If unspecified, items do not expire. + :param populate_query_metrics: Enable returning query metrics in response headers. + :param offer_throughput: The provisioned throughput for this offer. + :param unique_key_policy: The unique key policy to apply to the container. + :param conflict_resolution_policy: The conflict resolution policy to apply to the container. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword Callable response_hook: A callable invoked with the response metadata. + :keyword analytical_storage_ttl: Analytical store time to live (TTL) for items in the container. A value of + None leaves analytical storage off and a value of -1 turns analytical storage on with no TTL. Please + note that analytical storage can only be enabled on Synapse Link enabled accounts. + :returns: A `ContainerProxy` instance representing the new container. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The container creation failed. + :rtype: ~azure.cosmos.ContainerProxy + + .. admonition:: Example: + + .. literalinclude:: ../samples/examples.py + :start-after: [START create_container] + :end-before: [END create_container] + :language: python + :dedent: 0 + :caption: Create a container with default settings: + :name: create_container + + .. 
literalinclude:: ../samples/examples.py + :start-after: [START create_container_with_settings] + :end-before: [END create_container_with_settings] + :language: python + :dedent: 0 + :caption: Create a container with specific settings; in this case, a custom partition key: + :name: create_container_with_settings + """ + definition = dict(id=id) # type: Dict[str, Any] + if partition_key is not None: + definition["partitionKey"] = partition_key + if indexing_policy is not None: + if indexing_policy.get("indexingMode") is IndexingMode.Lazy: + warnings.warn( + "Lazy indexing mode has been deprecated. Mode will be set to consistent indexing by the backend.", + DeprecationWarning + ) + definition["indexingPolicy"] = indexing_policy + if default_ttl is not None: + definition["defaultTtl"] = default_ttl + if unique_key_policy is not None: + definition["uniqueKeyPolicy"] = unique_key_policy + if conflict_resolution_policy is not None: + definition["conflictResolutionPolicy"] = conflict_resolution_policy + + analytical_storage_ttl = kwargs.pop("analytical_storage_ttl", None) + if analytical_storage_ttl is not None: + definition["analyticalStorageTtl"] = analytical_storage_ttl + + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if populate_query_metrics is not None: + request_options["populateQueryMetrics"] = populate_query_metrics + if offer_throughput is not None: + request_options["offerThroughput"] = offer_throughput + + data = await self.client_connection.CreateContainer( + database_link=self.database_link, collection=definition, options=request_options, **kwargs + ) + + if response_hook: + response_hook(self.client_connection.last_response_headers, data) + + return ContainerProxy(self.client_connection, self.database_link, data["id"], properties=data) + + @distributed_trace_async + async def create_container_if_not_exists( + self, + id, # type: str # pylint: disable=redefined-builtin + partition_key, # type: Any + indexing_policy=None, # type: Optional[Dict[str, Any]] + default_ttl=None, # type: Optional[int] + populate_query_metrics=None, # type: Optional[bool] + offer_throughput=None, # type: Optional[int] + unique_key_policy=None, # type: Optional[Dict[str, Any]] + conflict_resolution_policy=None, # type: Optional[Dict[str, Any]] + **kwargs # type: Any + ): + # type: (...) -> ContainerProxy + """Create a container if it does not exist already. + + If the container already exists, the existing settings are returned. + Note: it does not check or update the existing container settings or offer throughput + if they differ from what was passed into the method. + + :param id: ID (name) of container to read or create. + :param partition_key: The partition key to use for the container. + :param indexing_policy: The indexing policy to apply to the container. + :param default_ttl: Default time to live (TTL) for items in the container. If unspecified, items do not expire. + :param populate_query_metrics: Enable returning query metrics in response headers. + :param offer_throughput: The provisioned throughput for this offer. + :param unique_key_policy: The unique key policy to apply to the container. + :param conflict_resolution_policy: The conflict resolution policy to apply to the container. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). 
Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword Callable response_hook: A callable invoked with the response metadata. + :keyword analytical_storage_ttl: Analytical store time to live (TTL) for items in the container. A value of + None leaves analytical storage off and a value of -1 turns analytical storage on with no TTL. Please + note that analytical storage can only be enabled on Synapse Link enabled accounts. + :returns: A `ContainerProxy` instance representing the container. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The container read or creation failed. + :rtype: ~azure.cosmos.ContainerProxy + """ + + analytical_storage_ttl = kwargs.pop("analytical_storage_ttl", None) + try: + container_proxy = self.get_container_client(id) + await container_proxy.read( + populate_query_metrics=populate_query_metrics, + **kwargs + ) + print("Read CONTAINER with success") + return container_proxy + except CosmosResourceNotFoundError: + return await self.create_container( + id=id, + partition_key=partition_key, + indexing_policy=indexing_policy, + default_ttl=default_ttl, + populate_query_metrics=populate_query_metrics, + offer_throughput=offer_throughput, + unique_key_policy=unique_key_policy, + conflict_resolution_policy=conflict_resolution_policy, + analytical_storage_ttl=analytical_storage_ttl + ) + @distributed_trace_async async def read(self, populate_query_metrics=None, **kwargs): # type: (Optional[bool], Any) -> Dict[str, Any] diff --git a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py index c7ff80e57f15..bd2e42a7d5de 100644 --- a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py +++ b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py @@ -10,8 +10,8 @@ import azure.cosmos.exceptions as exceptions from azure.cosmos.partition_key import PartitionKey -endpoint = 'https://simonmoreno-sql.documents.azure.com:443/' -key = 'ix7V0n09yDyiUarBkQnDRAqBwkxXMM6iGq7FlDHmHRlHZPUDRnBu55Vx2gwzd2Mkh6Qyrc8VnJWR6djgnkl8cw==' +endpoint = '' +key = '' import uuid @@ -41,31 +41,37 @@ def create_test(db_name, cont_name, num): print("Created {} items in {} DB successfully".format(num, db_name)) return ids -async def async_read_test(db_name, cont_name, num): - ids = create_test(db_name, cont_name, num) - client = AsyncClient(endpoint, key) - if client: print(client) - db = client.get_database_client(db_name) - if db: print(db) - x = await db.read() - print(x) - await client.close() - -async def with_read_test(db_name, cont_name, item_name): - async with AsyncClient(endpoint, key) as client: - print(client) - db = client.get_database_client(db_name) - if db: print(db) - x = await db.read() - print(x) - cont = db.get_container_client(cont_name) - if cont: print(cont) - x = await cont.read() - print(x) - x = await cont.read_item(item=item_name, partition_key=item_name) - print(x) - -def timed_sync(db2, cont2, num, ids): +def timed_sync_create(db_name, cont_name, num): + client = SyncClient(endpoint, key) + db = client.create_database(id=db_name) + container = db.create_container( + id=cont_name, + partition_key=PartitionKey(path="/id")) + ids = [] + start = time.time() + for i in range(num): + body = get_test_item() + ids.append(body.get("id")) + container.create_item(body=body) + print("Sync client created {} items in {} seconds".format(num, time.time() - start)) + 
return ids + +async def timed_async_create(db_name, cont_name, num): + async with AsyncClient(endpoint, key) as client: + db = await client.create_database_if_not_exists(id=db_name) + cont = await db.create_container_if_not_exists( + id=cont_name, + partition_key=PartitionKey(path="/id")) + ids = [] + start = time.time() + for i in range(num): + body = get_test_item() + ids.append(body.get("id")) + await cont.create_item(body=body) + print("Async client created {} items in {} seconds".format(num, time.time() - start)) + return ids + +def timed_sync_read(db2, cont2, num, ids): client = SyncClient(endpoint, key) db = client.get_database_client(db2) cont = db.get_container_client(cont2) @@ -76,7 +82,7 @@ def timed_sync(db2, cont2, num, ids): print("Error retrieving item {}".format(id)) print("Sync client retrieved {} items in {} seconds".format(num, time.time() - start)) -async def timed_async(db1, cont1, num, ids): +async def timed_async_read(db1, cont1, num, ids): async with AsyncClient(endpoint, key) as client: db = client.get_database_client(db1) cont = db.get_container_client(cont1) @@ -87,13 +93,35 @@ async def timed_async(db1, cont1, num, ids): print("Error retrieving item {}".format(id)) print("Async client retrieved {} items in {} seconds".format(num, time.time() - start)) -async def main(): +async def read_tests(): db = "db01" cont = "c01" - num = 100000 + num = 1000 ids = create_test(db, cont, num) - timed_sync(db,cont,num,ids) - await timed_async(db,cont,num,ids) + timed_sync_read(db,cont,num,ids) + await timed_async_read(db,cont,num,ids) + +async def create_tests(): + db1, db2 = "db01", "db02" + cont1, cont2 = "c01", "c02" + num = 10 + ids1 = timed_sync_create(db1,cont1,num) + ids2 = await timed_async_create(db2,cont2,num) + print(len(ids1) == len(ids2)) + +def user_test(): + client = SyncClient(endpoint, key) + db = client.get_database_client("xusud") + use = db.get_user_client(user="testid") + data = use.read() + print(data) + perms = use.list_permissions() + print(list(perms)) + +async def main(): + # await read_tests() + await create_tests() + if __name__ == "__main__": loop = asyncio.get_event_loop() From 36c5b902c42925e01e0eec9fb043fd910fd50bc4 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Mon, 13 Sep 2021 16:56:19 -0400 Subject: [PATCH 13/56] item delete working --- .../aio/_cosmos_client_connection_async.py | 119 ++++++++++++++++++ .../azure/cosmos/aio/container.py | 50 +++++++- .../azure/cosmos/aio/cosmos_client.py | 36 +++++- .../azure-cosmos/azure/cosmos/aio/database.py | 51 +++++++- .../azure-cosmos/samples/simon_testfile.py | 53 +++++++- 5 files changed, 301 insertions(+), 8 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py index 9266495e51cb..47c87f0a20e3 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py @@ -507,6 +507,125 @@ async def __Get(self, path, request_params, req_headers, **kwargs): **kwargs ) + async def DeleteDatabase(self, database_link, options=None, **kwargs): + """Deletes a database. + + :param str database_link: + The link to the database. + :param dict options: + The request options for the request. + + :return: + The deleted Database. 
+ :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(database_link) + database_id = base.GetResourceIdOrFullNameFromLink(database_link) + return await self.DeleteResource(path, "dbs", database_id, None, options, **kwargs) + + async def DeleteContainer(self, collection_link, options=None, **kwargs): + """Deletes a collection. + + :param str collection_link: + The link to the document collection. + :param dict options: + The request options for the request. + + :return: + The deleted Collection. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(collection_link) + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + return await self.DeleteResource(path, "colls", collection_id, None, options, **kwargs) + + + async def DeleteItem(self, document_link, options=None, **kwargs): + """Deletes a document. + + :param str document_link: + The link to the document. + :param dict options: + The request options for the request. + + :return: + The deleted Document. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(document_link) + document_id = base.GetResourceIdOrFullNameFromLink(document_link) + return await self.DeleteResource(path, "docs", document_id, None, options, **kwargs) + + async def DeleteResource(self, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin + """Deletes a Azure Cosmos resource and returns it. + + :param str path: + :param str typ: + :param str id: + :param dict initial_headers: + :param dict options: + The request options for the request. + + :return: + The deleted Azure Cosmos resource. + :rtype: + dict + + """ + if options is None: + options = {} + + initial_headers = initial_headers or self.default_headers + headers = base.GetHeaders(self, initial_headers, "delete", path, id, typ, options) + # Delete will use WriteEndpoint since it uses DELETE operation + request_params = _request_object.RequestObject(typ, documents._OperationType.Delete) + result, self.last_response_headers = await self.__Delete(path, request_params, headers, **kwargs) + + # update session for request mutates data on server side + self._UpdateSessionIfRequired(headers, result, self.last_response_headers) + + return result + + async def __Delete(self, path, request_params, req_headers, **kwargs): + """Azure Cosmos 'DELETE' http request. + + :params str url: + :params str path: + :params dict req_headers: + + :return: + Tuple of (result, headers). + :rtype: + tuple of (dict, dict) + + """ + request = self.pipeline_client.delete(url=path, headers=req_headers) + return await asynchronous_request.AsynchronousRequest( + client=self, + request_params=request_params, + global_endpoint_manager=self._global_endpoint_manager, + connection_policy=self.connection_policy, + pipeline_client=self.pipeline_client, + request=request, + request_data=None, + **kwargs + ) + def _UpdateSessionIfRequired(self, request_headers, response_result, response_headers): """ Updates session if necessary. 
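The three delete helpers above all funnel into DeleteResource, which builds the DELETE headers, routes the call to the write endpoint, and refreshes the session token from the response. A minimal hedged sketch of driving this connection-level path directly follows; it assumes the async client exposes its CosmosClientConnection as client_connection (as the proxy classes suggest), and the endpoint, key, and "dbs/db01" link are placeholders. Applications would normally go through the proxy wrappers added further below instead.

import asyncio
from azure.cosmos.aio.cosmos_client import CosmosClient

async def drop_database_via_connection():
    async with CosmosClient("<endpoint>", "<key>") as client:
        conn = client.client_connection  # assumed attribute holding the CosmosClientConnection
        # Name-based link of the form "dbs/<id>"; DeleteResource sends the DELETE
        # to the write endpoint and updates the session token afterwards.
        await conn.DeleteDatabase("dbs/db01")

asyncio.run(drop_database_via_connection())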
diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py index 9cd4f64e2f03..6a911ed5b35e 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py @@ -228,7 +228,7 @@ async def read_item( doc_link = self._get_document_link(item) request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) - + if partition_key is not None: request_options["partitionKey"] = self._set_partition_key(partition_key) if populate_query_metrics is not None: @@ -239,4 +239,50 @@ async def read_item( result = await self.client_connection.ReadItem(document_link=doc_link, options=request_options, **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers, result) - return result \ No newline at end of file + return result + + @distributed_trace_async + async def delete_item( + self, + item, # type: Union[Dict[str, Any], str] + partition_key, # type: Any + populate_query_metrics=None, # type: Optional[bool] + pre_trigger_include=None, # type: Optional[str] + post_trigger_include=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> None + """Delete the specified item from the container. + + If the item does not already exist in the container, an exception is raised. + + :param item: The ID (name) or dict representing item to be deleted. + :param partition_key: Specifies the partition key value for the item. + :param populate_query_metrics: Enable returning query metrics in response headers. + :param pre_trigger_include: trigger id to be used as pre operation trigger. + :param post_trigger_include: trigger id to be used as post operation trigger. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword Callable response_hook: A callable invoked with the response metadata. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The item wasn't deleted successfully. + :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The item does not exist in the container. 
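A hedged usage sketch for the new ContainerProxy.delete_item coroutine; the database, container, and item names are placeholders, and the partition key equals the item id because the sample containers are partitioned on /id.

import asyncio
from azure.cosmos.aio.cosmos_client import CosmosClient
from azure.cosmos.exceptions import CosmosResourceNotFoundError

async def delete_one_item():
    async with CosmosClient("<endpoint>", "<key>") as client:
        cont = client.get_database_client("db01").get_container_client("c01")
        try:
            await cont.delete_item(item="item-id", partition_key="item-id")
        except CosmosResourceNotFoundError:
            print("item was already gone")  # delete_item itself returns None

asyncio.run(delete_one_item())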
+ :rtype: None + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if partition_key is not None: + request_options["partitionKey"] = self._set_partition_key(partition_key) + if populate_query_metrics is not None: + request_options["populateQueryMetrics"] = populate_query_metrics + if pre_trigger_include is not None: + request_options["preTriggerInclude"] = pre_trigger_include + if post_trigger_include is not None: + request_options["postTriggerInclude"] = post_trigger_include + + document_link = self._get_document_link(item) + result = await self.client_connection.DeleteItem(document_link=document_link, options=request_options, **kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py index b80055f15826..267660878f33 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py @@ -258,7 +258,6 @@ async def create_database_if_not_exists( # pylint: disable=redefined-builtin populate_query_metrics=populate_query_metrics, **kwargs ) - print("Read DB with success") return database_proxy except CosmosResourceNotFoundError: return await self.create_database( @@ -286,4 +285,37 @@ def get_database_client(self, database): except TypeError: id_value = database - return DatabaseProxy(self.client_connection, id_value) \ No newline at end of file + return DatabaseProxy(self.client_connection, id_value) + + @distributed_trace_async + async def delete_database( + self, + database, # type: Union[str, DatabaseProxy, Dict[str, Any]] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any + ): + # type: (...) -> None + """Delete the database with the given ID (name). + + :param database: The ID (name), dict representing the properties or :class:`DatabaseProxy` + instance of the database to delete. + :type database: str or dict(str, str) or ~azure.cosmos.DatabaseProxy + :param bool populate_query_metrics: Enable returning query metrics in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword Callable response_hook: A callable invoked with the response metadata. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the database couldn't be deleted. 
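The matching client-level call is a one-liner; delete_database accepts an id string, a properties dict, or a DatabaseProxy. A hedged sketch with placeholder names:

import asyncio
from azure.cosmos.aio.cosmos_client import CosmosClient

async def drop_db():
    async with CosmosClient("<endpoint>", "<key>") as client:
        await client.delete_database("db01")  # id, dict, or DatabaseProxy all work

asyncio.run(drop_db())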
+ :rtype: None + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if populate_query_metrics is not None: + request_options["populateQueryMetrics"] = populate_query_metrics + + database_link = self._get_database_link(database) + await self.client_connection.DeleteDatabase(database_link, options=request_options, **kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers) \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py index da5c58defd35..dcd6d085be7e 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py @@ -83,6 +83,21 @@ def __repr__(self): # type () -> str return "".format(self.database_link)[:1024] + @staticmethod + def _get_container_id(container_or_id): + # type: (Union[str, ContainerProxy, Dict[str, Any]]) -> str + if isinstance(container_or_id, six.string_types): + return container_or_id + try: + return cast("ContainerProxy", container_or_id).id + except AttributeError: + pass + return cast("Dict[str, str]", container_or_id)["id"] + + def _get_container_link(self, container_or_id): + # type: (Union[str, ContainerProxy, Dict[str, Any]]) -> str + return u"{}/colls/{}".format(self.database_link, self._get_container_id(container_or_id)) + @distributed_trace_async async def create_container( self, @@ -226,7 +241,6 @@ async def create_container_if_not_exists( populate_query_metrics=populate_query_metrics, **kwargs ) - print("Read CONTAINER with success") return container_proxy except CosmosResourceNotFoundError: return await self.create_container( @@ -297,4 +311,37 @@ def get_container_client(self, container): except TypeError: id_value = container - return ContainerProxy(self.client_connection, self.database_link, id_value) \ No newline at end of file + return ContainerProxy(self.client_connection, self.database_link, id_value) + + @distributed_trace_async + async def delete_container( + self, + container, # type: Union[str, ContainerProxy, Dict[str, Any]] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any + ): + # type: (...) -> None + """Delete a container. + + :param container: The ID (name) of the container to delete. You can either + pass in the ID of the container to delete, a :class:`ContainerProxy` instance or + a dict representing the properties of the container. + :param populate_query_metrics: Enable returning query metrics in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword Callable response_hook: A callable invoked with the response metadata. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the container couldn't be deleted. 
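Likewise for DatabaseProxy.delete_container; a hedged sketch with placeholder names.

import asyncio
from azure.cosmos.aio.cosmos_client import CosmosClient

async def drop_container():
    async with CosmosClient("<endpoint>", "<key>") as client:
        db = client.get_database_client("db01")
        await db.delete_container("c01")  # id, dict, or ContainerProxy all work

asyncio.run(drop_container())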
+ :rtype: None + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if populate_query_metrics is not None: + request_options["populateQueryMetrics"] = populate_query_metrics + + collection_link = self._get_container_link(container) + result = await self.client_connection.DeleteContainer(collection_link, options=request_options, **kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py index bd2e42a7d5de..d92b48ded2df 100644 --- a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py +++ b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py @@ -27,6 +27,48 @@ def get_test_item(): } return async_item +async def async_crud_test(): + db_name = "asyncccc" + cont_name = "contttt" + async with AsyncClient(endpoint, key) as client: + db = await client.create_database(db_name) + print("Created DB, now reading and attempting create_if_not_exist") + print(await db.read()) + db = await client.create_database_if_not_exists(db_name) + print("Create if not exist had no problems, deleting DB now") + await client.delete_database(db_name) + print("DB deleted, now attempting read") + try: + await db.read() + except: + print("Error returned successfully for reading DB") + print("Re-creating DB for testing container") + db = await client.create_database(db_name) + cont = await db.create_container(id=cont_name, partition_key=PartitionKey(path="/id")) + print("Created CONT, now reading and attempting create_if_not_exists") + await cont.read() + cont = await db.create_container_if_not_exists(id=cont_name, partition_key=PartitionKey(path="/id")) + print("Create if not exist had no problems, deleting CONT now") + await db.delete_container(cont_name) + print("CONT deleted, now attempting read") + try: + await cont.read() + except: + print("Error returned succesfully") + print("Re-creating CONT for testing items") + cont = await db.create_container_if_not_exists(id=cont_name, partition_key=PartitionKey(path="/id")) + body = get_test_item() + await cont.create_item(body=body) + print("created item, now reading") + await cont.read_item(item=body.get("id"), partition_key=body.get("id")) + print("now deleting item and attempting to read") + await cont.delete_item(item=body.get("id"), partition_key=body.get("id")) + try: + await cont.read_item(item=body.get("id"), partition_key=body.get("id")) + except: + print("item delete failed successfully, now cleaning up") + await client.delete_database(db_name) + def create_test(db_name, cont_name, num): client = SyncClient(endpoint, key) db = client.create_database(id=db_name) @@ -106,7 +148,7 @@ async def create_tests(): cont1, cont2 = "c01", "c02" num = 10 ids1 = timed_sync_create(db1,cont1,num) - ids2 = await timed_async_create(db2,cont2,num) + ids2 = await timed_async_create(db1,cont1,num) print(len(ids1) == len(ids2)) def user_test(): @@ -118,9 +160,16 @@ def user_test(): perms = use.list_permissions() print(list(perms)) +def wrong_test(): + client = SyncClient(endpoint, key) + db = client.get_database_client("db01") + cont = db.get_container_client("c01") + cont.read() + cont.read_item(item="Async_c7997ca0-69c8-46f3-a9a3-5d85f50bafdf") + async def main(): # await read_tests() - await create_tests() + await async_crud_test() if __name__ == "__main__": From 44db2a2f5aa37acba1d1c15b861682454899bf27 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Thu, 16 
Sep 2021 16:32:37 -0400 Subject: [PATCH 14/56] docs replace functionality missing upsert and other resources --- .../aio/_cosmos_client_connection_async.py | 121 +++++++++++++++- .../azure/cosmos/aio/container.py | 49 +++++++ .../azure-cosmos/azure/cosmos/aio/database.py | 136 ++++++++++++++---- .../azure-cosmos/samples/simon_testfile.py | 77 ++++++---- 4 files changed, 322 insertions(+), 61 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py index 47c87f0a20e3..14755bfa0557 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py @@ -370,7 +370,7 @@ async def Create(self, body, path, typ, id, initial_headers, options=None, **kwa return result async def __Post(self, path, request_params, body, req_headers, **kwargs): - """Azure Cosmos 'POST' http request. + """Azure Cosmos 'POST' async http request. :params str url: :params str path: @@ -483,7 +483,7 @@ async def Read(self, path, typ, id, initial_headers, options=None, **kwargs): # return result async def __Get(self, path, request_params, req_headers, **kwargs): - """Azure Cosmos 'GET' async http request. + """Azure Cosmos 'GET' async async http request. :params str url: :params str path: @@ -507,6 +507,121 @@ async def __Get(self, path, request_params, req_headers, **kwargs): **kwargs ) + async def ReplaceContainer(self, collection_link, collection, options=None, **kwargs): + """Replaces a collection and return it. + + :param str collection_link: + The link to the collection entity. + :param dict collection: + The collection to be used. + :param dict options: + The request options for the request. + + :return: + The new Collection. + :rtype: + dict + + """ + if options is None: + options = {} + + CosmosClientConnection.__ValidateResource(collection) + path = base.GetPathFromLink(collection_link) + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + return await self.Replace(collection, path, "colls", collection_id, None, options, **kwargs) + + async def ReplaceItem(self, document_link, new_document, options=None, **kwargs): + """Replaces a document and returns it. + + :param str document_link: + The link to the document. + :param dict new_document: + :param dict options: + The request options for the request. + + :return: + The new Document. + :rtype: + dict + + """ + CosmosClientConnection.__ValidateResource(new_document) + path = base.GetPathFromLink(document_link) + document_id = base.GetResourceIdOrFullNameFromLink(document_link) + + # Python's default arguments are evaluated once when the function is defined, + # not each time the function is called (like it is in say, Ruby). This means + # that if you use a mutable default argument and mutate it, you will and have + # mutated that object for all future calls to the function as well. 
So, using + # a non-mutable deafult in this case(None) and assigning an empty dict(mutable) + # inside the function so that it remains local For more details on this gotcha, + # please refer http://docs.python-guide.org/en/latest/writing/gotchas/ + if options is None: + options = {} + + # Extract the document collection link and add the partition key to options + collection_link = base.GetItemContainerLink(document_link) + options = await self._AddPartitionKey(collection_link, new_document, options) + + return await self.Replace(new_document, path, "docs", document_id, None, options, **kwargs) + + async def Replace(self, resource, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin + """Replaces a Azure Cosmos resource and returns it. + + :param dict resource: + :param str path: + :param str typ: + :param str id: + :param dict initial_headers: + :param dict options: + The request options for the request. + + :return: + The new Azure Cosmos resource. + :rtype: + dict + + """ + if options is None: + options = {} + + initial_headers = initial_headers or self.default_headers + headers = base.GetHeaders(self, initial_headers, "put", path, id, typ, options) + # Replace will use WriteEndpoint since it uses PUT operation + request_params = _request_object.RequestObject(typ, documents._OperationType.Replace) + result, self.last_response_headers = await self.__Put(path, request_params, resource, headers, **kwargs) + + # update session for request mutates data on server side + self._UpdateSessionIfRequired(headers, result, self.last_response_headers) + return result + + async def __Put(self, path, request_params, body, req_headers, **kwargs): + """Azure Cosmos 'PUT' async http request. + + :params str url: + :params str path: + :params (str, unicode, dict) body: + :params dict req_headers: + + :return: + Tuple of (result, headers). + :rtype: + tuple of (dict, dict) + + """ + request = self.pipeline_client.put(url=path, headers=req_headers) + return await asynchronous_request.AsynchronousRequest( + client=self, + request_params=request_params, + global_endpoint_manager=self._global_endpoint_manager, + connection_policy=self.connection_policy, + pipeline_client=self.pipeline_client, + request=request, + request_data=body, + **kwargs + ) + async def DeleteDatabase(self, database_link, options=None, **kwargs): """Deletes a database. @@ -602,7 +717,7 @@ async def DeleteResource(self, path, typ, id, initial_headers, options=None, **k return result async def __Delete(self, path, request_params, req_headers, **kwargs): - """Azure Cosmos 'DELETE' http request. + """Azure Cosmos 'DELETE' async http request. :params str url: :params str path: diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py index 6a911ed5b35e..164d111c3bc2 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py @@ -241,6 +241,55 @@ async def read_item( response_hook(self.client_connection.last_response_headers, result) return result + @distributed_trace_async + async def replace_item( + self, + item, # type: Union[str, Dict[str, Any]] + body, # type: Dict[str, Any] + populate_query_metrics=None, # type: Optional[bool] + pre_trigger_include=None, # type: Optional[str] + post_trigger_include=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Dict[str, str] + """Replaces the specified item if it exists in the container. 
+ + If the item does not already exist in the container, an exception is raised. + + :param item: The ID (name) or dict representing item to be replaced. + :param body: A dict-like object representing the item to replace. + :param populate_query_metrics: Enable returning query metrics in response headers. + :param pre_trigger_include: trigger id to be used as pre operation trigger. + :param post_trigger_include: trigger id to be used as post operation trigger. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: A dict representing the item after replace went through. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The replace failed or the item with + given id does not exist. + :rtype: dict[str, Any] + """ + item_link = self._get_document_link(item) + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + request_options["disableIdGeneration"] = True + if populate_query_metrics is not None: + request_options["populateQueryMetrics"] = populate_query_metrics + if pre_trigger_include is not None: + request_options["preTriggerInclude"] = pre_trigger_include + if post_trigger_include is not None: + request_options["postTriggerInclude"] = post_trigger_include + + result = await self.client_connection.ReplaceItem( + document_link=item_link, new_document=body, options=request_options, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + @distributed_trace_async async def delete_item( self, diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py index dcd6d085be7e..54ca6e644871 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py @@ -98,6 +98,36 @@ def _get_container_link(self, container_or_id): # type: (Union[str, ContainerProxy, Dict[str, Any]]) -> str return u"{}/colls/{}".format(self.database_link, self._get_container_id(container_or_id)) + @distributed_trace_async + async def read(self, populate_query_metrics=None, **kwargs): + # type: (Optional[bool], Any) -> Dict[str, Any] + """Read the database properties. + + :param bool populate_query_metrics: Enable returning query metrics in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword Callable response_hook: A callable invoked with the response metadata. + :rtype: Dict[Str, Any] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given database couldn't be retrieved. 
+ """ + # TODO this helper function should be extracted from CosmosClient + from .cosmos_client import CosmosClient + + database_link = CosmosClient._get_database_link(self) + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if populate_query_metrics is not None: + request_options["populateQueryMetrics"] = populate_query_metrics + + self._properties = await self.client_connection.ReadDatabase( + database_link, options=request_options, **kwargs + ) + + if response_hook: + response_hook(self.client_connection.last_response_headers, self._properties) + + return cast('Dict[str, Any]', self._properties) + @distributed_trace_async async def create_container( self, @@ -255,36 +285,6 @@ async def create_container_if_not_exists( analytical_storage_ttl=analytical_storage_ttl ) - @distributed_trace_async - async def read(self, populate_query_metrics=None, **kwargs): - # type: (Optional[bool], Any) -> Dict[str, Any] - """Read the database properties. - - :param bool populate_query_metrics: Enable returning query metrics in response headers. - :keyword str session_token: Token for use with Session consistency. - :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. - :keyword Callable response_hook: A callable invoked with the response metadata. - :rtype: Dict[Str, Any] - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given database couldn't be retrieved. - """ - # TODO this helper function should be extracted from CosmosClient - from .cosmos_client import CosmosClient - - database_link = CosmosClient._get_database_link(self) - request_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - if populate_query_metrics is not None: - request_options["populateQueryMetrics"] = populate_query_metrics - - self._properties = await self.client_connection.ReadDatabase( - database_link, options=request_options, **kwargs - ) - - if response_hook: - response_hook(self.client_connection.last_response_headers, self._properties) - - return cast('Dict[str, Any]', self._properties) - def get_container_client(self, container): # type: (Union[str, ContainerProxy, Dict[str, Any]]) -> ContainerProxy """Get a `ContainerProxy` for a container with specified ID (name). @@ -313,6 +313,82 @@ def get_container_client(self, container): return ContainerProxy(self.client_connection, self.database_link, id_value) + @distributed_trace_async + async def replace_container( + self, + container, # type: Union[str, ContainerProxy, Dict[str, Any]] + partition_key, # type: Any + indexing_policy=None, # type: Optional[Dict[str, Any]] + default_ttl=None, # type: Optional[int] + conflict_resolution_policy=None, # type: Optional[Dict[str, Any]] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any + ): + # type: (...) -> ContainerProxy + """Reset the properties of the container. + + Property changes are persisted immediately. Any properties not specified + will be reset to their default values. + + :param container: The ID (name), dict representing the properties or + :class:`ContainerProxy` instance of the container to be replaced. + :param partition_key: The partition key to use for the container. + :param indexing_policy: The indexing policy to apply to the container. + :param default_ttl: Default time to live (TTL) for items in the container. + If unspecified, items do not expire. + :param conflict_resolution_policy: The conflict resolution policy to apply to the container. 
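The sample file further down exercises this by flipping defaultTtl; here is a standalone hedged sketch of the same call, with placeholder names and TTL value. Because unspecified properties are reset to their defaults, the partition key is restated.

import asyncio
from azure.cosmos.aio.cosmos_client import CosmosClient
from azure.cosmos.partition_key import PartitionKey

async def set_container_ttl():
    async with CosmosClient("<endpoint>", "<key>") as client:
        db = client.get_database_client("db01")
        await db.replace_container(
            container="c01",
            partition_key=PartitionKey(path="/lastName"),
            default_ttl=200,
        )
        props = await db.get_container_client("c01").read()
        print(props.get("defaultTtl"))  # -> 200

asyncio.run(set_container_ttl())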
+ :param populate_query_metrics: Enable returning query metrics in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword Callable response_hook: A callable invoked with the response metadata. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: Raised if the container couldn't be replaced. + This includes if the container with given id does not exist. + :returns: A `ContainerProxy` instance representing the container after replace completed. + :rtype: ~azure.cosmos.ContainerProxy + + .. admonition:: Example: + + .. literalinclude:: ../samples/examples.py + :start-after: [START reset_container_properties] + :end-before: [END reset_container_properties] + :language: python + :dedent: 0 + :caption: Reset the TTL property on a container, and display the updated properties: + :name: reset_container_properties + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if populate_query_metrics is not None: + request_options["populateQueryMetrics"] = populate_query_metrics + + container_id = self._get_container_id(container) + container_link = self._get_container_link(container_id) + parameters = { + key: value + for key, value in { + "id": container_id, + "partitionKey": partition_key, + "indexingPolicy": indexing_policy, + "defaultTtl": default_ttl, + "conflictResolutionPolicy": conflict_resolution_policy, + }.items() + if value is not None + } + + container_properties = await self.client_connection.ReplaceContainer( + container_link, collection=parameters, options=request_options, **kwargs + ) + + if response_hook: + response_hook(self.client_connection.last_response_headers, container_properties) + + return ContainerProxy( + self.client_connection, self.database_link, container_properties["id"], properties=container_properties + ) + @distributed_trace_async async def delete_container( self, diff --git a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py index d92b48ded2df..fa298a8d7cbe 100644 --- a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py +++ b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py @@ -23,17 +23,19 @@ def get_test_item(): 'city': 'Redmond', 'street': '1 Microsoft Way' }, - 'test_object': True + 'test_object': True, + 'lastName': 'Smith' } return async_item async def async_crud_test(): - db_name = "asyncccc" - cont_name = "contttt" + db_name = "crudAsync" + cont_name = "cont" + ttl = 200 async with AsyncClient(endpoint, key) as client: db = await client.create_database(db_name) print("Created DB, now reading and attempting create_if_not_exist") - print(await db.read()) + await db.read() db = await client.create_database_if_not_exists(db_name) print("Create if not exist had no problems, deleting DB now") await client.delete_database(db_name) @@ -42,32 +44,48 @@ async def async_crud_test(): await db.read() except: print("Error returned successfully for reading DB") - print("Re-creating DB for testing container") - db = await client.create_database(db_name) - cont = await db.create_container(id=cont_name, partition_key=PartitionKey(path="/id")) + 
print("Re-creating DB for testing container methods") + db = await client.create_database_if_not_exists(db_name) + cont = await db.create_container(id=cont_name, partition_key=PartitionKey(path="/lastName")) print("Created CONT, now reading and attempting create_if_not_exists") - await cont.read() - cont = await db.create_container_if_not_exists(id=cont_name, partition_key=PartitionKey(path="/id")) - print("Create if not exist had no problems, deleting CONT now") + c = await cont.read() + cont = await db.create_container_if_not_exists(id=cont_name, partition_key=PartitionKey(path="/lastName")) + print("Create if not exist had no problems, replacing and deleting CONT now") + assert c.get('defaultTtl') is None + await db.replace_container(container=cont_name, partition_key=PartitionKey(path='/lastName'), default_ttl=ttl) + c = await cont.read() + assert c.get('defaultTtl') == 200 + print("CONT properties changed, now deleting") await db.delete_container(cont_name) print("CONT deleted, now attempting read") try: await cont.read() except: print("Error returned succesfully") - print("Re-creating CONT for testing items") - cont = await db.create_container_if_not_exists(id=cont_name, partition_key=PartitionKey(path="/id")) - body = get_test_item() - await cont.create_item(body=body) - print("created item, now reading") - await cont.read_item(item=body.get("id"), partition_key=body.get("id")) - print("now deleting item and attempting to read") - await cont.delete_item(item=body.get("id"), partition_key=body.get("id")) + print("Re-creating CONT for testing item methods") + cont = await db.create_container_if_not_exists(id=cont_name, partition_key=PartitionKey(path="/lastName")) + body1 = get_test_item() + await cont.create_item(body=body1) + print("Created item, now reading and then replacing") + body2 = get_test_item() + await cont.replace_item(item=body1["id"], body=body2) + print("Item replaced, now attempting read") + try: + await cont.read_item(item=body1.get("id"), partition_key=body1.get("lastName")) + except: + print("Error returned succesfully, reading and deleting replaced item now") + await cont.read_item(item=body2.get("id"), partition_key=body2.get("lastName")) + await cont.delete_item(item=body2.get("id"), partition_key=body2.get("lastName")) + print("Item deleted, now attempting read") try: - await cont.read_item(item=body.get("id"), partition_key=body.get("id")) + await cont.read_item(item=body2.get("id"), partition_key=body2.get("lastName")) except: - print("item delete failed successfully, now cleaning up") + print("Error returned succesfully, cleaning up account now") await client.delete_database(db_name) + try: + await db.read() + except: + print("All cleaned up") def create_test(db_name, cont_name, num): client = SyncClient(endpoint, key) @@ -154,18 +172,21 @@ async def create_tests(): def user_test(): client = SyncClient(endpoint, key) db = client.get_database_client("xusud") - use = db.get_user_client(user="testid") - data = use.read() + u = db.get_user_client(user="testid") + data = u.read() print(data) - perms = use.list_permissions() + perms = u.list_permissions() print(list(perms)) def wrong_test(): - client = SyncClient(endpoint, key) - db = client.get_database_client("db01") - cont = db.get_container_client("c01") - cont.read() - cont.read_item(item="Async_c7997ca0-69c8-46f3-a9a3-5d85f50bafdf") + # client = SyncClient(endpoint, key) + # db = client.get_database_client("db111") + # cont = db.get_container_client("c111") + # cont.read() + # id = 
"Async_cc4b235e-ce8e-4b4f-835d-3c29182f0639" + # cont.read_item(item="wow", partition_key=id) + client = SyncClient.from_connection_string("") + print(list(client.list_databases())) async def main(): # await read_tests() From ec5b6ed32f7b5567fe948986429336012c0276e8 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Thu, 16 Sep 2021 20:11:23 -0400 Subject: [PATCH 15/56] upsert functionality missing read_all_items and both query methods for container class --- .../aio/_cosmos_client_connection_async.py | 71 +++++++++++++++++++ .../azure/cosmos/aio/container.py | 50 ++++++++++++- .../azure-cosmos/samples/simon_testfile.py | 16 ++++- 3 files changed, 134 insertions(+), 3 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py index 14755bfa0557..96da0dc99663 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py @@ -369,6 +369,77 @@ async def Create(self, body, path, typ, id, initial_headers, options=None, **kwa self._UpdateSessionIfRequired(headers, result, self.last_response_headers) return result + async def UpsertItem(self, database_or_container_link, document, options=None, **kwargs): + """Upserts a document in a collection. + + :param str database_or_container_link: + The link to the database when using partitioning, otherwise link to the document collection. + :param dict document: + The Azure Cosmos document to upsert. + :param dict options: + The request options for the request. + :param bool options['disableAutomaticIdGeneration']: + Disables the automatic id generation. If id is missing in the body and this + option is true, an error will be returned. + + :return: + The upserted Document. + :rtype: + dict + + """ + # Python's default arguments are evaluated once when the function is defined, + # not each time the function is called (like it is in say, Ruby). This means + # that if you use a mutable default argument and mutate it, you will and have + # mutated that object for all future calls to the function as well. So, using + # a non-mutable deafult in this case(None) and assigning an empty dict(mutable) + # inside the method For more details on this gotcha, please refer + # http://docs.python-guide.org/en/latest/writing/gotchas/ + if options is None: + options = {} + + # We check the link to be document collection link since it can be database + # link in case of client side partitioning + if base.IsItemContainerLink(database_or_container_link): + options = await self._AddPartitionKey(database_or_container_link, document, options) + + collection_id, document, path = self._GetContainerIdWithPathForItem( + database_or_container_link, document, options + ) + return await self.Upsert(document, path, "docs", collection_id, None, options, **kwargs) + + async def Upsert(self, body, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin + """Upserts a Azure Cosmos resource and returns it. + + :param dict body: + :param str path: + :param str typ: + :param str id: + :param dict initial_headers: + :param dict options: + The request options for the request. + + :return: + The upserted Azure Cosmos resource. 
+ :rtype: + dict + + """ + if options is None: + options = {} + + initial_headers = initial_headers or self.default_headers + headers = base.GetHeaders(self, initial_headers, "post", path, id, typ, options) + + headers[http_constants.HttpHeaders.IsUpsert] = True + + # Upsert will use WriteEndpoint since it uses POST operation + request_params = _request_object.RequestObject(typ, documents._OperationType.Upsert) + result, self.last_response_headers = await self.__Post(path, request_params, body, headers, **kwargs) + # update session for write request + self._UpdateSessionIfRequired(headers, result, self.last_response_headers) + return result + async def __Post(self, path, request_params, body, req_headers, **kwargs): """Azure Cosmos 'POST' async http request. diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py index 164d111c3bc2..fb1252404063 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py @@ -228,7 +228,6 @@ async def read_item( doc_link = self._get_document_link(item) request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) - if partition_key is not None: request_options["partitionKey"] = self._set_partition_key(partition_key) if populate_query_metrics is not None: @@ -241,6 +240,55 @@ async def read_item( response_hook(self.client_connection.last_response_headers, result) return result + @distributed_trace_async + async def upsert_item( + self, + body, # type: Dict[str, Any] + populate_query_metrics=None, # type: Optional[bool] + pre_trigger_include=None, # type: Optional[str] + post_trigger_include=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Dict[str, str] + """Insert or update the specified item. + + If the item already exists in the container, it is replaced. If the item + does not already exist, it is inserted. + + :param body: A dict-like object representing the item to update or insert. + :param populate_query_metrics: Enable returning query metrics in response headers. + :param pre_trigger_include: trigger id to be used as pre operation trigger. + :param post_trigger_include: trigger id to be used as post operation trigger. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: A dict representing the upserted item. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The given item could not be upserted. 
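A hedged sketch of upsert_item; the first call inserts the document and the second overwrites the same logical item rather than creating a duplicate. All names are placeholders.

import asyncio
from azure.cosmos.aio.cosmos_client import CosmosClient

async def upsert_twice():
    async with CosmosClient("<endpoint>", "<key>") as client:
        cont = client.get_database_client("db01").get_container_client("c01")
        doc = {"id": "item-id", "lastName": "Smith", "attempts": 1}
        await cont.upsert_item(body=doc)   # inserted
        doc["attempts"] = 2
        await cont.upsert_item(body=doc)   # replaced in place; still a single item

asyncio.run(upsert_twice())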
+ :rtype: dict[str, Any] + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + request_options["disableIdGeneration"] = True + if populate_query_metrics is not None: + request_options["populateQueryMetrics"] = populate_query_metrics + if pre_trigger_include is not None: + request_options["preTriggerInclude"] = pre_trigger_include + if post_trigger_include is not None: + request_options["postTriggerInclude"] = post_trigger_include + + result = await self.client_connection.UpsertItem( + database_or_container_link=self.container_link, + document=body, + options=request_options, + **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + @distributed_trace_async async def replace_item( self, diff --git a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py index fa298a8d7cbe..a486a0364a47 100644 --- a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py +++ b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py @@ -66,8 +66,10 @@ async def async_crud_test(): cont = await db.create_container_if_not_exists(id=cont_name, partition_key=PartitionKey(path="/lastName")) body1 = get_test_item() await cont.create_item(body=body1) - print("Created item, now reading and then replacing") + print("Created item, now reading and then upserting/replacing") body2 = get_test_item() + await cont.upsert_item(body=body1) + # Check here for read all items and verify there is still only 1 left after upsert await cont.replace_item(item=body1["id"], body=body2) print("Item replaced, now attempting read") try: @@ -166,7 +168,7 @@ async def create_tests(): cont1, cont2 = "c01", "c02" num = 10 ids1 = timed_sync_create(db1,cont1,num) - ids2 = await timed_async_create(db1,cont1,num) + ids2 = await timed_async_create(db2,cont2,num) print(len(ids1) == len(ids2)) def user_test(): @@ -188,6 +190,16 @@ def wrong_test(): client = SyncClient.from_connection_string("") print(list(client.list_databases())) +# async def read_all(): +# async with AsyncClient(endpoint, key) as client: +# db = await client.create_database_if_not_exists("readall") +# cont = await db.create_container_if_not_exists("cont", PartitionKey(path='/lastName')) +# for i in range(5): +# await cont.create_item(body=get_test_item()) +# c = await cont.read_all_items() +# print(await c.__anext__()) +# print(type(c)) + async def main(): # await read_tests() await async_crud_test() From 5d74c8fb8bbc73a0978b0610b66cd3a21d1ed587 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Mon, 11 Oct 2021 10:12:16 -0400 Subject: [PATCH 16/56] missing query methods --- .../aio/_cosmos_client_connection_async.py | 134 ++++++++++++++- .../azure/cosmos/aio/container.py | 141 +++++++++++----- .../azure/cosmos/aio/cosmos_client.py | 23 ++- .../azure-cosmos/azure/cosmos/aio/database.py | 156 ++++++++++++++++++ 4 files changed, 408 insertions(+), 46 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py index 96da0dc99663..1747d49e7aec 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py @@ -62,7 +62,6 @@ # pylint: disable=protected-access - class CosmosClientConnection(object): # pylint: disable=too-many-public-methods,too-many-instance-attributes """Represents a document client. 
@@ -211,6 +210,12 @@ async def _setup(self): self._setup_kwargs['database_account'] = await self._global_endpoint_manager._GetDatabaseAccount(**self._setup_kwargs) await self._global_endpoint_manager.force_refresh(self._setup_kwargs['database_account']) + def _GetDatabaseIdWithPathForUser(self, database_link, user): # pylint: disable=no-self-use + CosmosClientConnection.__ValidateResource(user) + path = base.GetPathFromLink(database_link, "users") + database_id = base.GetResourceIdOrFullNameFromLink(database_link) + return database_id, path + async def GetDatabaseAccount(self, url_connection=None, **kwargs): """Gets database account info. @@ -276,6 +281,28 @@ async def CreateDatabase(self, database, options=None, **kwargs): path = "/dbs" return await self.Create(database, path, "dbs", None, None, options, **kwargs) + async def CreateUser(self, database_link, user, options=None, **kwargs): + """Creates a user. + + :param str database_link: + The link to the database. + :param dict user: + The Azure Cosmos user to create. + :param dict options: + The request options for the request. + + :return: + The created User. + :rtype: + dict + + """ + if options is None: + options = {} + + database_id, path = self._GetDatabaseIdWithPathForUser(database_link, user) + return await self.Create(user, path, "users", database_id, None, options, **kwargs) + async def CreateContainer(self, database_link, collection, options=None, **kwargs): """Creates a collection in a database. @@ -369,6 +396,26 @@ async def Create(self, body, path, typ, id, initial_headers, options=None, **kwa self._UpdateSessionIfRequired(headers, result, self.last_response_headers) return result + async def UpsertUser(self, database_link, user, options=None, **kwargs): + """Upserts a user. + + :param str database_link: + The link to the database. + :param dict user: + The Azure Cosmos user to upsert. + :param dict options: + The request options for the request. + + :return: + The upserted User. + :rtype: dict + """ + if options is None: + options = {} + + database_id, path = self._GetDatabaseIdWithPathForUser(database_link, user) + return await self.Upsert(user, path, "users", database_id, None, options, **kwargs) + async def UpsertItem(self, database_or_container_link, document, options=None, **kwargs): """Upserts a document in a collection. @@ -527,6 +574,26 @@ async def ReadItem(self, document_link, options=None, **kwargs): document_id = base.GetResourceIdOrFullNameFromLink(document_link) return await self.Read(path, "docs", document_id, None, options, **kwargs) + async def ReadConflict(self, conflict_link, options=None, **kwargs): + """Reads a conflict. + + :param str conflict_link: + The link to the conflict. + :param dict options: + + :return: + The read Conflict. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(conflict_link) + conflict_id = base.GetResourceIdOrFullNameFromLink(conflict_link) + return await self.Read(path, "conflicts", conflict_id, None, options, **kwargs) + async def Read(self, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin """Reads a Azure Cosmos resource and returns it. @@ -578,6 +645,29 @@ async def __Get(self, path, request_params, req_headers, **kwargs): **kwargs ) + async def ReplaceUser(self, user_link, user, options=None, **kwargs): + """Replaces a user and return it. + + :param str user_link: + The link to the user entity. + :param dict user: + :param dict options: + The request options for the request. 
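These user helpers are connection-level only in this patch (no DatabaseProxy wrappers yet), so a hedged sketch has to reach into the connection directly. It assumes the client exposes client_connection and uses standard name-based links; every id below is a placeholder.

import asyncio
from azure.cosmos.aio.cosmos_client import CosmosClient

async def user_roundtrip():
    async with CosmosClient("<endpoint>", "<key>") as client:
        conn = client.client_connection  # assumed attribute
        db_link = "dbs/db01"
        await conn.CreateUser(db_link, {"id": "alice"})
        await conn.UpsertUser(db_link, {"id": "alice"})   # create-or-replace semantics
        await conn.DeleteUser(db_link + "/users/alice")

asyncio.run(user_roundtrip())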
+ + :return: + The new User. + :rtype: + dict + + """ + if options is None: + options = {} + + CosmosClientConnection.__ValidateResource(user) + path = base.GetPathFromLink(user_link) + user_id = base.GetResourceIdOrFullNameFromLink(user_link) + return await self.Replace(user, path, "users", user_id, None, options, **kwargs) + async def ReplaceContainer(self, collection_link, collection, options=None, **kwargs): """Replaces a collection and return it. @@ -714,6 +804,27 @@ async def DeleteDatabase(self, database_link, options=None, **kwargs): database_id = base.GetResourceIdOrFullNameFromLink(database_link) return await self.DeleteResource(path, "dbs", database_id, None, options, **kwargs) + async def DeleteUser(self, user_link, options=None, **kwargs): + """Deletes a user. + + :param str user_link: + The link to the user entity. + :param dict options: + The request options for the request. + + :return: + The deleted user. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(user_link) + user_id = base.GetResourceIdOrFullNameFromLink(user_link) + return await self.DeleteResource(path, "users", user_id, None, options, **kwargs) + async def DeleteContainer(self, collection_link, options=None, **kwargs): """Deletes a collection. @@ -757,6 +868,27 @@ async def DeleteItem(self, document_link, options=None, **kwargs): document_id = base.GetResourceIdOrFullNameFromLink(document_link) return await self.DeleteResource(path, "docs", document_id, None, options, **kwargs) + async def DeleteConflict(self, conflict_link, options=None, **kwargs): + """Deletes a conflict. + + :param str conflict_link: + The link to the conflict. + :param dict options: + The request options for the request. + + :return: + The deleted Conflict. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(conflict_link) + conflict_id = base.GetResourceIdOrFullNameFromLink(conflict_link) + return await self.DeleteResource(path, "conflicts", conflict_id, None, options, **kwargs) + async def DeleteResource(self, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin """Deletes a Azure Cosmos resource and returns it. diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py index fb1252404063..92f35704de11 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py @@ -42,6 +42,8 @@ # pylint: disable=protected-access # pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs +# Missing container query methods: +# query_items(), query_items_change_feed(), read_all_items(), query_conflicts(), list_conflicts(), replace_throughput(), read_offer() class ContainerProxy(object): """An interface to interact with a specific DB Container. 
@@ -93,11 +95,59 @@ def _get_document_link(self, item_or_link): return u"{}/docs/{}".format(self.container_link, item_or_link) return item_or_link["_self"] + def _get_conflict_link(self, conflict_or_link): + # type: (Union[Dict[str, Any], str]) -> str + if isinstance(conflict_or_link, six.string_types): + return u"{}/conflicts/{}".format(self.container_link, conflict_or_link) + return conflict_or_link["_self"] + def _set_partition_key(self, partition_key): if partition_key == NonePartitionKeyValue: return CosmosClientConnection._return_undefined_or_empty_partition_key(self.is_system_key) #might have to await here return partition_key + @distributed_trace_async + async def read( + self, + populate_query_metrics=None, # type: Optional[bool] + populate_partition_key_range_statistics=None, # type: Optional[bool] + populate_quota_info=None, # type: Optional[bool] + **kwargs # type: Any + ): + # type: (...) -> Dict[str, Any] + """Read the container properties. + + :param populate_query_metrics: Enable returning query metrics in response headers. + :param populate_partition_key_range_statistics: Enable returning partition key + range statistics in response headers. + :param populate_quota_info: Enable returning collection storage quota information in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword Callable response_hook: A callable invoked with the response metadata. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: Raised if the container couldn't be retrieved. + This includes if the container does not exist. + :returns: Dict representing the retrieved container. + :rtype: dict[str, Any] + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if populate_query_metrics is not None: + request_options["populateQueryMetrics"] = populate_query_metrics + if populate_partition_key_range_statistics is not None: + request_options["populatePartitionKeyRangeStatistics"] = populate_partition_key_range_statistics + if populate_quota_info is not None: + request_options["populateQuotaInfo"] = populate_quota_info + + collection_link = self.container_link + self._properties = await self.client_connection.ReadContainer( + collection_link, options=request_options, **kwargs + ) + + if response_hook: + response_hook(self.client_connection.last_response_headers, self._properties) + + return cast('Dict[str, Any]', self._properties) + @distributed_trace_async async def create_item( self, @@ -150,48 +200,6 @@ async def create_item( response_hook(self.client_connection.last_response_headers, result) return result - @distributed_trace_async - async def read( - self, - populate_query_metrics=None, # type: Optional[bool] - populate_partition_key_range_statistics=None, # type: Optional[bool] - populate_quota_info=None, # type: Optional[bool] - **kwargs # type: Any - ): - # type: (...) -> Dict[str, Any] - """Read the container properties. - - :param populate_query_metrics: Enable returning query metrics in response headers. - :param populate_partition_key_range_statistics: Enable returning partition key - range statistics in response headers. - :param populate_quota_info: Enable returning collection storage quota information in response headers. - :keyword str session_token: Token for use with Session consistency. - :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. 
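# A usage sketch for the async ContainerProxy.read() defined above, assuming `container`
# is an initialized ContainerProxy; the response_hook simply inspects the quota headers
# that populate_quota_info requests (the header name shown is an assumption).
async def show_container_quota(container):
    def on_response(headers, properties):
        print(headers.get("x-ms-resource-usage"))
    props = await container.read(populate_quota_info=True, response_hook=on_response)
    return props["id"]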
- :keyword Callable response_hook: A callable invoked with the response metadata. - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: Raised if the container couldn't be retrieved. - This includes if the container does not exist. - :returns: Dict representing the retrieved container. - :rtype: dict[str, Any] - """ - request_options = build_options(kwargs) - response_hook = kwargs.pop('response_hook', None) - if populate_query_metrics is not None: - request_options["populateQueryMetrics"] = populate_query_metrics - if populate_partition_key_range_statistics is not None: - request_options["populatePartitionKeyRangeStatistics"] = populate_partition_key_range_statistics - if populate_quota_info is not None: - request_options["populateQuotaInfo"] = populate_quota_info - - collection_link = self.container_link - self._properties = await self.client_connection.ReadContainer( - collection_link, options=request_options, **kwargs - ) - - if response_hook: - response_hook(self.client_connection.last_response_headers, self._properties) - - return cast('Dict[str, Any]', self._properties) - @distributed_trace_async async def read_item( self, @@ -383,3 +391,52 @@ async def delete_item( result = await self.client_connection.DeleteItem(document_link=document_link, options=request_options, **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers, result) + + @distributed_trace_async + async def get_conflict(self, conflict, partition_key, **kwargs): + # type: (Union[str, Dict[str, Any]], Any, Any) -> Dict[str, str] + """Get the conflict identified by `conflict`. + + :param conflict: The ID (name) or dict representing the conflict to retrieve. + :param partition_key: Partition key for the conflict to retrieve. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: A dict representing the retrieved conflict. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The given conflict couldn't be retrieved. + :rtype: dict[str, Any] + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if partition_key is not None: + request_options["partitionKey"] = self._set_partition_key(partition_key) + + result = await self.client_connection.ReadConflict( + conflict_link=self._get_conflict_link(conflict), options=request_options, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace_async + async def delete_conflict(self, conflict, partition_key, **kwargs): + # type: (Union[str, Dict[str, Any]], Any, Any) -> None + """Delete a specified conflict from the container. + + If the conflict does not already exist in the container, an exception is raised. + + :param conflict: The ID (name) or dict representing the conflict to be deleted. + :param partition_key: Partition key for the conflict to delete. + :keyword Callable response_hook: A callable invoked with the response metadata. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The conflict wasn't deleted successfully. + :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The conflict does not exist in the container. 
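# A short sketch of the new conflict helpers, assuming `container` is an async
# ContainerProxy; the conflict id and partition key value are placeholders.
async def inspect_and_drop_conflict(container):
    conflict = await container.get_conflict("conflict-id", partition_key="pk-value")
    print(conflict["_self"])    # the self-link that _get_conflict_link() falls back to
    await container.delete_conflict(conflict, partition_key="pk-value")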
+ :rtype: None + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if partition_key is not None: + request_options["partitionKey"] = self._set_partition_key(partition_key) + + result = await self.client_connection.DeleteConflict( + conflict_link=self._get_conflict_link(conflict), options=request_options, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py index 267660878f33..2261a430c75b 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py @@ -22,11 +22,13 @@ """Create, read, and delete databases in the Azure Cosmos DB SQL API service. """ +#Missing methods: +#list_databases(), query_databases(), + from typing import Any, Dict, Optional, Union, cast, Iterable, List -from azure.core.tracing.decorator_async import distributed_trace_async # pylint: disable=unused-import import six -from azure.core.tracing.decorator import distributed_trace # type: ignore +from azure.core.tracing.decorator_async import distributed_trace_async # pylint: disable=unused-import from ..cosmos_client import _parse_connection_str, _build_auth from ._cosmos_client_connection_async import CosmosClientConnection @@ -318,4 +320,19 @@ async def delete_database( database_link = self._get_database_link(database) await self.client_connection.DeleteDatabase(database_link, options=request_options, **kwargs) if response_hook: - response_hook(self.client_connection.last_response_headers) \ No newline at end of file + response_hook(self.client_connection.last_response_headers) + + @distributed_trace_async + async def get_database_account(self, **kwargs): + # type: (Any) -> DatabaseAccount + """Retrieve the database account information. + + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: A `DatabaseAccount` instance representing the Cosmos DB Database Account. + :rtype: ~azure.cosmos.DatabaseAccount + """ + response_hook = kwargs.pop('response_hook', None) + result = await self.client_connection.GetDatabaseAccount(**kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers) + return result \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py index 54ca6e644871..ba2fc513d0ea 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py @@ -42,6 +42,9 @@ # pylint: disable=protected-access # pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs +#Missing query methods: +#list_containers(), query_containers(), list_users(), query_users(), read_offer(), replace_throughput() + class DatabaseProxy(object): """An interface to interact with a specific database. 
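# A sketch of the new async get_database_account(), assuming an initialized async
# CosmosClient `client`; the DatabaseAccount attributes shown mirror the sync SDK and
# are assumptions here.
async def print_account_regions(client):
    account = await client.get_database_account()
    print(account.WritableLocations)
    print(account.ReadableLocations)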
@@ -98,6 +101,16 @@ def _get_container_link(self, container_or_id): # type: (Union[str, ContainerProxy, Dict[str, Any]]) -> str return u"{}/colls/{}".format(self.database_link, self._get_container_id(container_or_id)) + def _get_user_link(self, user_or_id): + # type: (Union[UserProxy, str, Dict[str, Any]]) -> str + if isinstance(user_or_id, six.string_types): + return u"{}/users/{}".format(self.database_link, user_or_id) + try: + return cast("UserProxy", user_or_id).user_link + except AttributeError: + pass + return u"{}/users/{}".format(self.database_link, cast("Dict[str, str]", user_or_id)["id"]) + @distributed_trace_async async def read(self, populate_query_metrics=None, **kwargs): # type: (Optional[bool], Any) -> Dict[str, Any] @@ -419,5 +432,148 @@ async def delete_container( collection_link = self._get_container_link(container) result = await self.client_connection.DeleteContainer(collection_link, options=request_options, **kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + + @distributed_trace_async + async def create_user(self, body, **kwargs): + # type: (Dict[str, Any], Any) -> UserProxy + """Create a new user in the container. + + To update or replace an existing user, use the + :func:`ContainerProxy.upsert_user` method. + + :param body: A dict-like object with an `id` key and value representing the user to be created. + The user ID must be unique within the database, and consist of no more than 255 characters. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: A `UserProxy` instance representing the new user. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given user couldn't be created. + :rtype: ~azure.cosmos.UserProxy + + .. admonition:: Example: + + .. literalinclude:: ../samples/examples.py + :start-after: [START create_user] + :end-before: [END create_user] + :language: python + :dedent: 0 + :caption: Create a database user: + :name: create_user + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + + user = await self.client_connection.CreateUser( + database_link=self.database_link, user=body, options=request_options, **kwargs) + + if response_hook: + response_hook(self.client_connection.last_response_headers, user) + + return UserProxy( + client_connection=self.client_connection, id=user["id"], database_link=self.database_link, properties=user + ) + + def get_user_client(self, user): + # type: (Union[str, UserProxy, Dict[str, Any]]) -> UserProxy + """Get a `UserProxy` for a user with specified ID. + + :param user: The ID (name), dict representing the properties or :class:`UserProxy` + instance of the user to be retrieved. + :returns: A `UserProxy` instance representing the retrieved user. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given user couldn't be retrieved. + :rtype: ~azure.cosmos.UserProxy + """ + if isinstance(user, UserProxy): + id_value = user.id + else: + try: + id_value = user["id"] + except TypeError: + id_value = user + + return UserProxy(client_connection=self.client_connection, id=id_value, database_link=self.database_link) + + @distributed_trace_async + async def upsert_user(self, body, **kwargs): + # type: (Dict[str, Any], Any) -> UserProxy + """Insert or update the specified user. + + If the user already exists in the container, it is replaced. If the user + does not already exist, it is inserted. 
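# A sketch of the database-level user helpers defined above, assuming `database` is an
# async DatabaseProxy; the user id is a placeholder.
async def add_user(database):
    user_proxy = await database.create_user({"id": "app-user"})
    # get_user_client() is purely local: it builds a UserProxy without a network call
    same_user = database.get_user_client("app-user")
    return user_proxy.id == same_user.id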
+ + :param body: A dict-like object representing the user to update or insert. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: A `UserProxy` instance representing the upserted user. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given user could not be upserted. + :rtype: ~azure.cosmos.UserProxy + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + + user = await self.client_connection.UpsertUser( + database_link=self.database_link, user=body, options=request_options, **kwargs + ) + + if response_hook: + response_hook(self.client_connection.last_response_headers, user) + + return UserProxy( + client_connection=self.client_connection, id=user["id"], database_link=self.database_link, properties=user + ) + + @distributed_trace_async + async def replace_user( + self, + user, # type: Union[str, UserProxy, Dict[str, Any]] + body, # type: Dict[str, Any] + **kwargs # type: Any + ): + # type: (...) -> UserProxy + """Replaces the specified user if it exists in the container. + + :param user: The ID (name), dict representing the properties or :class:`UserProxy` + instance of the user to be replaced. + :param body: A dict-like object representing the user to replace. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: A `UserProxy` instance representing the user after replace went through. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: + If the replace failed or the user with given ID does not exist. + :rtype: ~azure.cosmos.UserProxy + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + + replaced_user = await self.client_connection.ReplaceUser( + user_link=self._get_user_link(user), user=body, options=request_options, **kwargs + ) # type: Dict[str, str] + + if response_hook: + response_hook(self.client_connection.last_response_headers, replaced_user) + + return UserProxy( + client_connection=self.client_connection, + id=replaced_user["id"], + database_link=self.database_link, + properties=replaced_user + ) + + @distributed_trace_async + async def delete_user(self, user, **kwargs): + # type: (Union[str, UserProxy, Dict[str, Any]], Any) -> None + """Delete the specified user from the container. + + :param user: The ID (name), dict representing the properties or :class:`UserProxy` + instance of the user to be deleted. + :keyword Callable response_hook: A callable invoked with the response metadata. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The user wasn't deleted successfully. + :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The user does not exist in the container. 
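# Continuing the user sketch: replace and delete via the new async methods, assuming
# `database` is a DatabaseProxy and the id is a placeholder.
async def rotate_user(database):
    replaced = await database.replace_user("app-user", {"id": "app-user"})
    await database.delete_user(replaced)   # accepts an id, a properties dict, or a UserProxy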
+ :rtype: None + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + + result = await self.client_connection.DeleteUser( + user_link=self._get_user_link(user), options=request_options, **kwargs + ) if response_hook: response_hook(self.client_connection.last_response_headers, result) \ No newline at end of file From 89fc2f731c39ab18cf07b085d6544bfbc7f55230 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Mon, 11 Oct 2021 22:42:55 -0400 Subject: [PATCH 17/56] CRUD for udf, sproc, triggers --- .../aio/_cosmos_client_connection_async.py | 346 ++++++++++++++ .../azure/cosmos/aio/container.py | 11 +- .../azure-cosmos/azure/cosmos/aio/scripts.py | 430 ++++++++++++++++++ 3 files changed, 784 insertions(+), 3 deletions(-) create mode 100644 sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py index 1747d49e7aec..0e77367dc146 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py @@ -216,6 +216,41 @@ def _GetDatabaseIdWithPathForUser(self, database_link, user): # pylint: disable database_id = base.GetResourceIdOrFullNameFromLink(database_link) return database_id, path + def _GetContainerIdWithPathForSproc(self, collection_link, sproc): # pylint: disable=no-self-use + CosmosClientConnection.__ValidateResource(sproc) + sproc = sproc.copy() + if sproc.get("serverScript"): + sproc["body"] = str(sproc.pop("serverScript", "")) + elif sproc.get("body"): + sproc["body"] = str(sproc["body"]) + path = base.GetPathFromLink(collection_link, "sprocs") + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + return collection_id, path, sproc + + def _GetContainerIdWithPathForTrigger(self, collection_link, trigger): # pylint: disable=no-self-use + CosmosClientConnection.__ValidateResource(trigger) + trigger = trigger.copy() + if trigger.get("serverScript"): + trigger["body"] = str(trigger.pop("serverScript", "")) + elif trigger.get("body"): + trigger["body"] = str(trigger["body"]) + + path = base.GetPathFromLink(collection_link, "triggers") + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + return collection_id, path, trigger + + def _GetContainerIdWithPathForUDF(self, collection_link, udf): # pylint: disable=no-self-use + CosmosClientConnection.__ValidateResource(udf) + udf = udf.copy() + if udf.get("serverScript"): + udf["body"] = str(udf.pop("serverScript", "")) + elif udf.get("body"): + udf["body"] = str(udf["body"]) + + path = base.GetPathFromLink(collection_link, "udfs") + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + return collection_id, path, udf + async def GetDatabaseAccount(self, url_connection=None, **kwargs): """Gets database account info. @@ -365,6 +400,103 @@ async def CreateItem(self, database_or_container_link, document, options=None, * return await self.Create(document, path, "docs", collection_id, None, options, **kwargs) + async def CreateUserDefinedFunction(self, collection_link, udf, options=None, **kwargs): + """Creates a user-defined function in a collection. + + :param str collection_link: + The link to the collection. + :param str udf: + :param dict options: + The request options for the request. + + :return: + The created UDF. 
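# The _GetContainerIdWithPathFor* helpers above accept either a "serverScript" or a
# "body" key and normalize both into "body" before the resource is created. A minimal
# illustrative definition (the id and JavaScript are placeholders):
new_sproc = {
    "id": "helloSproc",
    "serverScript": "function () { getContext().getResponse().setBody('hello'); }",
}
# After _GetContainerIdWithPathForSproc() runs, the copy sent to the service carries the
# JavaScript under "body"; the dict passed by the caller is left untouched.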
+ :rtype: + dict + + """ + if options is None: + options = {} + + collection_id, path, udf = self._GetContainerIdWithPathForUDF(collection_link, udf) + return await self.Create(udf, path, "udfs", collection_id, None, options, **kwargs) + + async def CreateTrigger(self, collection_link, trigger, options=None, **kwargs): + """Creates a trigger in a collection. + + :param str collection_link: + The link to the document collection. + :param dict trigger: + :param dict options: + The request options for the request. + + :return: + The created Trigger. + :rtype: + dict + + """ + if options is None: + options = {} + + collection_id, path, trigger = self._GetContainerIdWithPathForTrigger(collection_link, trigger) + return await self.Create(trigger, path, "triggers", collection_id, None, options, **kwargs) + + async def CreateStoredProcedure(self, collection_link, sproc, options=None, **kwargs): + """Creates a stored procedure in a collection. + + :param str collection_link: + The link to the document collection. + :param str sproc: + :param dict options: + The request options for the request. + + :return: + The created Stored Procedure. + :rtype: + dict + + """ + if options is None: + options = {} + + collection_id, path, sproc = self._GetContainerIdWithPathForSproc(collection_link, sproc) + return await self.Create(sproc, path, "sprocs", collection_id, None, options, **kwargs) + + async def ExecuteStoredProcedure(self, sproc_link, params, options=None, **kwargs): + """Executes a store procedure. + + :param str sproc_link: + The link to the stored procedure. + :param dict params: + List or None + :param dict options: + The request options for the request. + + :return: + The Stored Procedure response. + :rtype: + dict + + """ + if options is None: + options = {} + + initial_headers = dict(self.default_headers) + initial_headers.update({http_constants.HttpHeaders.Accept: (runtime_constants.MediaTypes.Json)}) + + if params and not isinstance(params, list): + params = [params] + + path = base.GetPathFromLink(sproc_link) + sproc_id = base.GetResourceIdOrFullNameFromLink(sproc_link) + headers = base.GetHeaders(self, initial_headers, "post", path, sproc_id, "sprocs", options) + + # ExecuteStoredProcedure will use WriteEndpoint since it uses POST operation + request_params = _request_object.RequestObject("sprocs", documents._OperationType.ExecuteJavaScript) + result, self.last_response_headers = await self.__Post(path, request_params, params, headers, **kwargs) + return result + async def Create(self, body, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin """Creates a Azure Cosmos resource and returns it. @@ -574,6 +706,69 @@ async def ReadItem(self, document_link, options=None, **kwargs): document_id = base.GetResourceIdOrFullNameFromLink(document_link) return await self.Read(path, "docs", document_id, None, options, **kwargs) + async def ReadUserDefinedFunction(self, udf_link, options=None, **kwargs): + """Reads a user-defined function. + + :param str udf_link: + The link to the user-defined function. + :param dict options: + The request options for the request. + + :return: + The read UDF. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(udf_link) + udf_id = base.GetResourceIdOrFullNameFromLink(udf_link) + return await self.Read(path, "udfs", udf_id, None, options, **kwargs) + + async def ReadStoredProcedure(self, sproc_link, options=None, **kwargs): + """Reads a stored procedure. 
+ + :param str sproc_link: + The link to the stored procedure. + :param dict options: + The request options for the request. + + :return: + The read Stored Procedure. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(sproc_link) + sproc_id = base.GetResourceIdOrFullNameFromLink(sproc_link) + return await self.Read(path, "sprocs", sproc_id, None, options, **kwargs) + + async def ReadTrigger(self, trigger_link, options=None, **kwargs): + """Reads a trigger. + + :param str trigger_link: + The link to the trigger. + :param dict options: + The request options for the request. + + :return: + The read Trigger. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(trigger_link) + trigger_id = base.GetResourceIdOrFullNameFromLink(trigger_link) + return await self.Read(path, "triggers", trigger_id, None, options, **kwargs) + async def ReadConflict(self, conflict_link, options=None, **kwargs): """Reads a conflict. @@ -692,6 +887,64 @@ async def ReplaceContainer(self, collection_link, collection, options=None, **kw collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) return await self.Replace(collection, path, "colls", collection_id, None, options, **kwargs) + async def ReplaceUserDefinedFunction(self, udf_link, udf, options=None, **kwargs): + """Replaces a user-defined function and returns it. + + :param str udf_link: + The link to the user-defined function. + :param dict udf: + :param dict options: + The request options for the request. + + :return: + The new UDF. + :rtype: + dict + + """ + if options is None: + options = {} + + CosmosClientConnection.__ValidateResource(udf) + udf = udf.copy() + if udf.get("serverScript"): + udf["body"] = str(udf["serverScript"]) + elif udf.get("body"): + udf["body"] = str(udf["body"]) + + path = base.GetPathFromLink(udf_link) + udf_id = base.GetResourceIdOrFullNameFromLink(udf_link) + return await self.Replace(udf, path, "udfs", udf_id, None, options, **kwargs) + + async def ReplaceTrigger(self, trigger_link, trigger, options=None, **kwargs): + """Replaces a trigger and returns it. + + :param str trigger_link: + The link to the trigger. + :param dict trigger: + :param dict options: + The request options for the request. + + :return: + The replaced Trigger. + :rtype: + dict + + """ + if options is None: + options = {} + + CosmosClientConnection.__ValidateResource(trigger) + trigger = trigger.copy() + if trigger.get("serverScript"): + trigger["body"] = str(trigger["serverScript"]) + elif trigger.get("body"): + trigger["body"] = str(trigger["body"]) + + path = base.GetPathFromLink(trigger_link) + trigger_id = base.GetResourceIdOrFullNameFromLink(trigger_link) + return await self.Replace(trigger, path, "triggers", trigger_id, None, options, **kwargs) + async def ReplaceItem(self, document_link, new_document, options=None, **kwargs): """Replaces a document and returns it. @@ -727,6 +980,36 @@ async def ReplaceItem(self, document_link, new_document, options=None, **kwargs) return await self.Replace(new_document, path, "docs", document_id, None, options, **kwargs) + + async def ReplaceStoredProcedure(self, sproc_link, sproc, options=None, **kwargs): + """Replaces a stored procedure and returns it. + + :param str sproc_link: + The link to the stored procedure. + :param dict sproc: + :param dict options: + The request options for the request. + + :return: + The replaced Stored Procedure. 
+ :rtype: + dict + + """ + if options is None: + options = {} + + CosmosClientConnection.__ValidateResource(sproc) + sproc = sproc.copy() + if sproc.get("serverScript"): + sproc["body"] = str(sproc["serverScript"]) + elif sproc.get("body"): + sproc["body"] = str(sproc["body"]) + + path = base.GetPathFromLink(sproc_link) + sproc_id = base.GetResourceIdOrFullNameFromLink(sproc_link) + return await self.Replace(sproc, path, "sprocs", sproc_id, None, options, **kwargs) + async def Replace(self, resource, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin """Replaces a Azure Cosmos resource and returns it. @@ -868,6 +1151,69 @@ async def DeleteItem(self, document_link, options=None, **kwargs): document_id = base.GetResourceIdOrFullNameFromLink(document_link) return await self.DeleteResource(path, "docs", document_id, None, options, **kwargs) + async def DeleteUserDefinedFunction(self, udf_link, options=None, **kwargs): + """Deletes a user-defined function. + + :param str udf_link: + The link to the user-defined function. + :param dict options: + The request options for the request. + + :return: + The deleted UDF. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(udf_link) + udf_id = base.GetResourceIdOrFullNameFromLink(udf_link) + return await self.DeleteResource(path, "udfs", udf_id, None, options, **kwargs) + + async def DeleteTrigger(self, trigger_link, options=None, **kwargs): + """Deletes a trigger. + + :param str trigger_link: + The link to the trigger. + :param dict options: + The request options for the request. + + :return: + The deleted Trigger. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(trigger_link) + trigger_id = base.GetResourceIdOrFullNameFromLink(trigger_link) + return await self.DeleteResource(path, "triggers", trigger_id, None, options, **kwargs) + + async def DeleteStoredProcedure(self, sproc_link, options=None, **kwargs): + """Deletes a stored procedure. + + :param str sproc_link: + The link to the stored procedure. + :param dict options: + The request options for the request. + + :return: + The deleted Stored Procedure. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(sproc_link) + sproc_id = base.GetResourceIdOrFullNameFromLink(sproc_link) + return await self.DeleteResource(path, "sprocs", sproc_id, None, options, **kwargs) + async def DeleteConflict(self, conflict_link, options=None, **kwargs): """Deletes a conflict. 
diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py index 92f35704de11..3e1fb5876364 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py @@ -25,8 +25,6 @@ from typing import Any, Dict, List, Optional, Union, Iterable, cast # pylint: disable=unused-import import six -import asyncio -import time from azure.core.tracing.decorator_async import distributed_trace_async # type: ignore from ._cosmos_client_connection_async import CosmosClientConnection @@ -34,7 +32,7 @@ from ..exceptions import CosmosResourceNotFoundError from ..http_constants import StatusCodes from ..offer import Offer -from ..scripts import ScriptsProxy +from .scripts import ScriptsProxy from ..partition_key import NonePartitionKeyValue __all__ = ("ContainerProxy",) @@ -89,6 +87,13 @@ async def is_system_key(self): ) return cast('bool', self._is_system_key) + @property + def scripts(self): + # type: () -> ScriptsProxy + if self._scripts is None: + self._scripts = ScriptsProxy(self.client_connection, self.container_link, self.is_system_key) + return cast('ScriptsProxy', self._scripts) + def _get_document_link(self, item_or_link): # type: (Union[Dict[str, Any], str]) -> str if isinstance(item_or_link, six.string_types): diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py new file mode 100644 index 000000000000..3acc8780917c --- /dev/null +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py @@ -0,0 +1,430 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Create, read, update and delete and execute scripts in the Azure Cosmos DB SQL API service. +""" + +from typing import Any, List, Dict, Union, Iterable, Optional + +import six + +from azure.cosmos.aio._cosmos_client_connection_async import CosmosClientConnection +from .._base import build_options +from ..partition_key import NonePartitionKeyValue + +# pylint: disable=protected-access +# pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs + + +class ScriptType(object): + StoredProcedure = "sprocs" + Trigger = "triggers" + UserDefinedFunction = "udfs" + + +class ScriptsProxy(object): + """An interface to interact with stored procedures. + + This class should not be instantiated directly. 
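# The usual entry point is the ContainerProxy.scripts property added above; a minimal
# sketch assuming `container` is an async ContainerProxy and the sproc id is a placeholder.
async def read_sproc(container):
    return await container.scripts.get_stored_procedure("helloSproc")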
Instead, use the + :func:`ContainerProxy.scripts` attribute. + """ + + def __init__(self, client_connection, container_link, is_system_key): + # type: (CosmosClientConnection, str, bool) -> None + self.client_connection = client_connection + self.container_link = container_link + self.is_system_key = is_system_key + + def _get_resource_link(self, script_or_id, typ): + # type: (Union[Dict[str, Any], str], str) -> str + if isinstance(script_or_id, six.string_types): + return u"{}/{}/{}".format(self.container_link, typ, script_or_id) + return script_or_id["_self"] + + #TODO add async query functionality + # async def list_stored_procedures(self, max_item_count=None, **kwargs): + # # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + # """List all stored procedures in the container. + + # :param int max_item_count: Max number of items to be returned in the enumeration operation. + # :returns: An Iterable of stored procedures (dicts). + # :rtype: Iterable[dict[str, Any]] + # """ + # feed_options = build_options(kwargs) + # if max_item_count is not None: + # feed_options["maxItemCount"] = max_item_count + + # return await self.client_connection.ReadStoredProcedures( + # collection_link=self.container_link, options=feed_options, **kwargs + # ) + + #TODO add async query functionality + # async def query_stored_procedures(self, query, parameters=None, max_item_count=None, **kwargs): + # # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] + # """Return all stored procedures matching the given `query`. + + # :param query: The Azure Cosmos DB SQL query to execute. + # :param parameters: Optional array of parameters to the query. Ignored if no query is provided. + # :param max_item_count: Max number of items to be returned in the enumeration operation. + # :returns: An Iterable of stored procedures (dicts). + # :rtype: Iterable[dict[str, Any]] + # """ + # feed_options = build_options(kwargs) + # if max_item_count is not None: + # feed_options["maxItemCount"] = max_item_count + + # return await self.client_connection.QueryStoredProcedures( + # collection_link=self.container_link, + # query=query if parameters is None else dict(query=query, parameters=parameters), + # options=feed_options, + # **kwargs + # ) + + async def get_stored_procedure(self, sproc, **kwargs): + # type: (Union[str, Dict[str, Any]], Any) -> Dict[str, Any] + """Get the stored procedure identified by `id`. + + :param sproc: The ID (name) or dict representing stored procedure to retrieve. + :returns: A dict representing the retrieved stored procedure. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given stored procedure couldn't be retrieved. + :rtype: dict[str, Any] + """ + request_options = build_options(kwargs) + + return await self.client_connection.ReadStoredProcedure( + sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), options=request_options, **kwargs + ) + + async def create_stored_procedure(self, body, **kwargs): + # type: (Dict[str, Any], Any) -> Dict[str, Any] + """Create a new stored procedure in the container. + + To replace an existing sproc, use the :func:`Container.scripts.replace_stored_procedure` method. + + :param body: A dict-like object representing the sproc to create. + :returns: A dict representing the new stored procedure. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given stored procedure couldn't be created. 
+ :rtype: dict[str, Any] + """ + request_options = build_options(kwargs) + + return await self.client_connection.CreateStoredProcedure( + collection_link=self.container_link, sproc=body, options=request_options, **kwargs + ) + + async def replace_stored_procedure(self, sproc, body, **kwargs): + # type: (Union[str, Dict[str, Any]], Dict[str, Any], Any) -> Dict[str, Any] + """Replace a specified stored procedure in the container. + + If the stored procedure does not already exist in the container, an exception is raised. + + :param sproc: The ID (name) or dict representing stored procedure to be replaced. + :param body: A dict-like object representing the sproc to replace. + :returns: A dict representing the stored procedure after replace went through. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the replace failed or the stored + procedure with given id does not exist. + :rtype: dict[str, Any] + """ + request_options = build_options(kwargs) + + return await self.client_connection.ReplaceStoredProcedure( + sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), + sproc=body, + options=request_options, + **kwargs + ) + + async def delete_stored_procedure(self, sproc, **kwargs): + # type: (Union[str, Dict[str, Any]], Any) -> None + """Delete a specified stored procedure from the container. + + If the stored procedure does not already exist in the container, an exception is raised. + + :param sproc: The ID (name) or dict representing stored procedure to be deleted. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The sproc wasn't deleted successfully. + :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The sproc does not exist in the container. + :rtype: None + """ + request_options = build_options(kwargs) + + await self.client_connection.DeleteStoredProcedure( + sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), options=request_options, **kwargs + ) + + async def execute_stored_procedure( + self, + sproc, # type: Union[str, Dict[str, Any]] + partition_key=None, # type: Optional[str] + params=None, # type: Optional[List[Any]] + enable_script_logging=None, # type: Optional[bool] + **kwargs # type: Any + ): + # type: (...) -> Any + """Execute a specified stored procedure. + + If the stored procedure does not already exist in the container, an exception is raised. + + :param sproc: The ID (name) or dict representing stored procedure to be executed. + :param partition_key: Specifies the partition key to indicate which partition the sproc should execute on. + :param params: List of parameters to be passed to the stored procedure to be executed. + :param bool enable_script_logging: Enables or disables script logging for the current request. + :returns: Result of the executed stored procedure for the given parameters. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the stored procedure execution failed + or if the stored procedure with given id does not exists in the container. 
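# Executing a stored procedure through ScriptsProxy; a hedged sketch assuming `container`
# is an async ContainerProxy, with placeholder partition key and parameters.
async def run_hello(container):
    return await container.scripts.execute_stored_procedure(
        "helloSproc",
        partition_key="pk-value",
        params=["optional", "arguments"],
        enable_script_logging=True,
    )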
+ :rtype: dict[str, Any] + """ + + request_options = build_options(kwargs) + if partition_key is not None: + request_options["partitionKey"] = ( + CosmosClientConnection._return_undefined_or_empty_partition_key(self.is_system_key) + if partition_key == NonePartitionKeyValue + else partition_key + ) + if enable_script_logging is not None: + request_options["enableScriptLogging"] = enable_script_logging + + return await self.client_connection.ExecuteStoredProcedure( + sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), + params=params, + options=request_options, + **kwargs + ) + + #TODO add query functionality + # async def list_triggers(self, max_item_count=None, **kwargs): + # # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + # """List all triggers in the container. + + # :param max_item_count: Max number of items to be returned in the enumeration operation. + # :returns: An Iterable of triggers (dicts). + # :rtype: Iterable[dict[str, Any]] + # """ + # feed_options = build_options(kwargs) + # if max_item_count is not None: + # feed_options["maxItemCount"] = max_item_count + + # return await self.client_connection.ReadTriggers( + # collection_link=self.container_link, options=feed_options, **kwargs + # ) + + #TODO add query functionality + # async def query_triggers(self, query, parameters=None, max_item_count=None, **kwargs): + # # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] + # """Return all triggers matching the given `query`. + + # :param query: The Azure Cosmos DB SQL query to execute. + # :param parameters: Optional array of parameters to the query. Ignored if no query is provided. + # :param max_item_count: Max number of items to be returned in the enumeration operation. + # :returns: An Iterable of triggers (dicts). + # :rtype: Iterable[dict[str, Any]] + # """ + # feed_options = build_options(kwargs) + # if max_item_count is not None: + # feed_options["maxItemCount"] = max_item_count + + # return await self.client_connection.QueryTriggers( + # collection_link=self.container_link, + # query=query if parameters is None else dict(query=query, parameters=parameters), + # options=feed_options, + # **kwargs + # ) + + async def get_trigger(self, trigger, **kwargs): + # type: (Union[str, Dict[str, Any]], Any) -> Dict[str, Any] + """Get a trigger identified by `id`. + + :param trigger: The ID (name) or dict representing trigger to retrieve. + :returns: A dict representing the retrieved trigger. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given trigger couldn't be retrieved. + :rtype: dict[str, Any] + """ + request_options = build_options(kwargs) + + return await self.client_connection.ReadTrigger( + trigger_link=self._get_resource_link(trigger, ScriptType.Trigger), options=request_options, **kwargs + ) + + async def create_trigger(self, body, **kwargs): + # type: (Dict[str, Any], Any) -> Dict[str, Any] + """Create a trigger in the container. + + To replace an existing trigger, use the :func:`ContainerProxy.scripts.replace_trigger` method. + + :param body: A dict-like object representing the trigger to create. + :returns: A dict representing the new trigger. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given trigger couldn't be created. 
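# A hedged sketch of a trigger definition for create_trigger(); the keys beyond "id"
# and "body" follow the service's trigger resource shape and are assumptions here.
async def add_pre_trigger(container):
    trigger_def = {
        "id": "validateItem",
        "body": "function () { /* pre-trigger JavaScript */ }",
        "triggerType": "Pre",          # run before the operation
        "triggerOperation": "Create",  # only for item creates
    }
    return await container.scripts.create_trigger(trigger_def)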
+ :rtype: dict[str, Any] + """ + request_options = build_options(kwargs) + + return await self.client_connection.CreateTrigger( + collection_link=self.container_link, trigger=body, options=request_options, **kwargs + ) + + async def replace_trigger(self, trigger, body, **kwargs): + # type: (Union[str, Dict[str, Any]], Dict[str, Any], Any) -> Dict[str, Any] + """Replace a specified tigger in the container. + + If the trigger does not already exist in the container, an exception is raised. + + :param trigger: The ID (name) or dict representing trigger to be replaced. + :param body: A dict-like object representing the trigger to replace. + :returns: A dict representing the trigger after replace went through. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the replace failed or the trigger with given + id does not exist. + :rtype: dict[str, Any] + """ + request_options = build_options(kwargs) + + return await self.client_connection.ReplaceTrigger( + trigger_link=self._get_resource_link(trigger, ScriptType.Trigger), + trigger=body, + options=request_options, + **kwargs + ) + + async def delete_trigger(self, trigger, **kwargs): + # type: (Union[str, Dict[str, Any]], Any) -> None + """Delete a specified trigger from the container. + + If the trigger does not already exist in the container, an exception is raised. + + :param trigger: The ID (name) or dict representing trigger to be deleted. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The trigger wasn't deleted successfully. + :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The trigger does not exist in the container. + :rtype: None + """ + request_options = build_options(kwargs) + + await self.client_connection.DeleteTrigger( + trigger_link=self._get_resource_link(trigger, ScriptType.Trigger), options=request_options, **kwargs + ) + + #TODO add query functionality + # async def list_user_defined_functions(self, max_item_count=None, **kwargs): + # # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + # """List all the user-defined functions in the container. + + # :param max_item_count: Max number of items to be returned in the enumeration operation. + # :returns: An Iterable of user-defined functions (dicts). + # :rtype: Iterable[dict[str, Any]] + # """ + # feed_options = build_options(kwargs) + # if max_item_count is not None: + # feed_options["maxItemCount"] = max_item_count + + # return await self.client_connection.ReadUserDefinedFunctions( + # collection_link=self.container_link, options=feed_options, **kwargs + # ) + + #TODO add query functionality + # async def query_user_defined_functions(self, query, parameters=None, max_item_count=None, **kwargs): + # # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] + # """Return user-defined functions matching a given `query`. + + # :param query: The Azure Cosmos DB SQL query to execute. + # :param parameters: Optional array of parameters to the query. Ignored if no query is provided. + # :param max_item_count: Max number of items to be returned in the enumeration operation. + # :returns: An Iterable of user-defined functions (dicts). 
+ # :rtype: Iterable[dict[str, Any]] + # """ + # feed_options = build_options(kwargs) + # if max_item_count is not None: + # feed_options["maxItemCount"] = max_item_count + + # return await self.client_connection.QueryUserDefinedFunctions( + # collection_link=self.container_link, + # query=query if parameters is None else dict(query=query, parameters=parameters), + # options=feed_options, + # **kwargs + # ) + + async def get_user_defined_function(self, udf, **kwargs): + # type: (Union[str, Dict[str, Any]], Any) -> Dict[str, Any] + """Get a user-defined functions identified by `id`. + + :param udf: The ID (name) or dict representing udf to retrieve. + :returns: A dict representing the retrieved user-defined function. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the user-defined function couldn't be retrieved. + :rtype: Iterable[dict[str, Any]] + """ + request_options = build_options(kwargs) + + return await self.client_connection.ReadUserDefinedFunction( + udf_link=self._get_resource_link(udf, ScriptType.UserDefinedFunction), options=request_options, **kwargs + ) + + async def create_user_defined_function(self, body, **kwargs): + # type: (Dict[str, Any], Any) -> Dict[str, Any] + """Create a user-defined function in the container. + + To replace an existing UDF, use the :func:`ContainerProxy.scripts.replace_user_defined_function` method. + + :param body: A dict-like object representing the udf to create. + :returns: A dict representing the new user-defined function. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the user-defined function couldn't be created. + :rtype: dict[str, Any] + """ + request_options = build_options(kwargs) + + return await self.client_connection.CreateUserDefinedFunction( + collection_link=self.container_link, udf=body, options=request_options, **kwargs + ) + + async def replace_user_defined_function(self, udf, body, **kwargs): + # type: (Union[str, Dict[str, Any]], Dict[str, Any], Any) -> Dict[str, Any] + """Replace a specified user-defined function in the container. + + If the UDF does not already exist in the container, an exception is raised. + + :param udf: The ID (name) or dict representing udf to be replaced. + :param body: A dict-like object representing the udf to replace. + :returns: A dict representing the user-defined function after replace went through. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the replace failed or the user-defined function + with the given id does not exist. + :rtype: dict[str, Any] + """ + request_options = build_options(kwargs) + + return await self.client_connection.ReplaceUserDefinedFunction( + udf_link=self._get_resource_link(udf, ScriptType.UserDefinedFunction), + udf=body, + options=request_options, + **kwargs + ) + + async def delete_user_defined_function(self, udf, **kwargs): + # type: (Union[str, Dict[str, Any]], Any) -> None + """Delete a specified user-defined function from the container. + + If the UDF does not already exist in the container, an exception is raised. + + :param udf: The ID (name) or dict representing udf to be deleted. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The udf wasn't deleted successfully. + :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The UDF does not exist in the container. 
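# A sketch of the UDF lifecycle exposed above, assuming `container` is an async
# ContainerProxy; the id and JavaScript are placeholders.
async def manage_udf(container):
    udf_def = {"id": "toUpper", "body": "function (s) { return s.toUpperCase(); }"}
    await container.scripts.create_user_defined_function(udf_def)
    await container.scripts.replace_user_defined_function("toUpper", udf_def)
    await container.scripts.delete_user_defined_function("toUpper")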
+ :rtype: None + """ + request_options = build_options(kwargs) + + await self.client_connection.DeleteUserDefinedFunction( + udf_link=self._get_resource_link(udf, ScriptType.UserDefinedFunction), options=request_options, **kwargs + ) From 043dfe025666fe07a52db8cb4bdd6f84d3c34c1e Mon Sep 17 00:00:00 2001 From: simorenoh Date: Wed, 13 Oct 2021 10:37:58 -0400 Subject: [PATCH 18/56] initial query logic + container methods --- .../aio/base_execution_context_async.py | 171 +++++++ .../aio/execution_dispatcher_async.py | 191 ++++++++ .../_execution_context/endpoint_component.py | 52 +-- .../azure/cosmos/aio/_asynchronous_request.py | 4 +- .../aio/_cosmos_client_connection_async.py | 435 +++++++++++++++++- .../azure/cosmos/aio/_query_iterable_async.py | 101 ++++ ...try_utility.py => _retry_utility_async.py} | 0 .../azure/cosmos/aio/container.py | 326 ++++++++++++- .../azure/cosmos/aio/cosmos_client.py | 2 +- .../azure-cosmos/samples/simon_testfile.py | 80 +++- 10 files changed, 1304 insertions(+), 58 deletions(-) create mode 100644 sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/base_execution_context_async.py create mode 100644 sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/execution_dispatcher_async.py create mode 100644 sdk/cosmos/azure-cosmos/azure/cosmos/aio/_query_iterable_async.py rename sdk/cosmos/azure-cosmos/azure/cosmos/aio/{_retry_utility.py => _retry_utility_async.py} (100%) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/base_execution_context_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/base_execution_context_async.py new file mode 100644 index 000000000000..c26eedbe1740 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/base_execution_context_async.py @@ -0,0 +1,171 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal class for query execution context implementation in the Azure Cosmos +database service. +""" + +from collections import deque +import copy + +from ...aio import _retry_utility_async +from ... import http_constants + +# pylint: disable=protected-access + + +class _QueryExecutionContextBase(object): + """ + This is the abstract base execution context class. + """ + + def __init__(self, client, options): + """ + :param CosmosClient client: + :param dict options: The request options for the request. 
+ """ + self._client = client + self._options = options + self._is_change_feed = "changeFeed" in options and options["changeFeed"] is True + self._continuation = self._get_initial_continuation() + self._has_started = False + self._has_finished = False + self._buffer = deque() + + def _get_initial_continuation(self): + if "continuation" in self._options: + if "enableCrossPartitionQuery" in self._options: + raise ValueError("continuation tokens are not supported for cross-partition queries.") + return self._options["continuation"] + return None + + def _has_more_pages(self): + return not self._has_started or self._continuation + + async def fetch_next_block(self): + """Returns a block of results with respecting retry policy. + + This method only exists for backward compatibility reasons. (Because + QueryIterable has exposed fetch_next_block api). + + :return: List of results. + :rtype: list + """ + if not self._has_more_pages(): + return [] + + if self._buffer: #might need async version of this + # if there is anything in the buffer returns that + res = list(self._buffer) + self._buffer.clear() + return res + + # fetches the next block + return await self._fetch_next_block() + + async def _fetch_next_block(self): + raise NotImplementedError + + async def __aiter__(self): + """Returns itself as an iterator""" + return self + + async def __anext__(self): + """Return the next query result. + + :return: The next query result. + :rtype: dict + :raises StopIteration: If no more result is left. + """ + if self._has_finished: + raise StopIteration + + if not self._buffer: + + results = await self.fetch_next_block() + self._buffer.extend(results) + + if not self._buffer: + raise StopIteration + + return self._buffer.popleft() + + async def _fetch_items_helper_no_retries(self, fetch_function): + """Fetches more items and doesn't retry on failure + + :return: List of fetched items. + :rtype: list + """ + fetched_items = [] + # Continues pages till finds a non empty page or all results are exhausted + while self._continuation or not self._has_started: + if not self._has_started: + self._has_started = True + new_options = copy.deepcopy(self._options) + new_options["continuation"] = self._continuation + (fetched_items, response_headers) = await fetch_function(new_options) + continuation_key = http_constants.HttpHeaders.Continuation + # Use Etag as continuation token for change feed queries. + if self._is_change_feed: + continuation_key = http_constants.HttpHeaders.ETag + # In change feed queries, the continuation token is always populated. The hasNext() test is whether + # there is any items in the response or not. + if not self._is_change_feed or fetched_items: + self._continuation = response_headers.get(continuation_key) + else: + self._continuation = None + if fetched_items: + break + return fetched_items + + async def _fetch_items_helper_with_retries(self, fetch_function): + async def callback(): + return await self._fetch_items_helper_no_retries(fetch_function) + + return await _retry_utility_async.ExecuteAsync(self._client, self._client._global_endpoint_manager, callback) + + next = __anext__ # Python 2 compatibility. + + +class _DefaultQueryExecutionContext(_QueryExecutionContextBase): + """ + This is the default execution context. + """ + + def __init__(self, client, options, fetch_function): + """ + :param CosmosClient client: + :param dict options: The request options for the request. 
+ :param method fetch_function: + Will be invoked for retrieving each page + + Example of `fetch_function`: + + >>> def result_fn(result): + >>> return result['Databases'] + + """ + super(_DefaultQueryExecutionContext, self).__init__(client, options) + self._fetch_function = fetch_function + + async def _fetch_next_block(self): + while super(_DefaultQueryExecutionContext, self)._has_more_pages() and not self._buffer: + return await self._fetch_items_helper_with_retries(self._fetch_function) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/execution_dispatcher_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/execution_dispatcher_async.py new file mode 100644 index 000000000000..5bc87d4f1138 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/execution_dispatcher_async.py @@ -0,0 +1,191 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal class for proxy query execution context implementation in the Azure +Cosmos database service. +""" + +from six.moves import xrange +from azure.cosmos.exceptions import CosmosHttpResponseError +from azure.cosmos._execution_context.execution_dispatcher import _is_partitioned_execution_info, _get_partitioned_execution_info +from azure.cosmos._execution_context import multi_execution_aggregator +from azure.cosmos._execution_context.aio.base_execution_context_async import _QueryExecutionContextBase, _DefaultQueryExecutionContext +from azure.cosmos._execution_context.query_execution_info import _PartitionedQueryExecutionInfo +from azure.cosmos._execution_context import endpoint_component +from azure.cosmos.documents import _DistinctType +from azure.cosmos.http_constants import StatusCodes + +# pylint: disable=protected-access + +class _ProxyQueryExecutionContext(_QueryExecutionContextBase): # pylint: disable=abstract-method + """Represents a proxy execution context wrapper. + + By default, uses _DefaultQueryExecutionContext. 
+ + If backend responds a 400 error code with a Query Execution Info, switches + to _MultiExecutionContextAggregator + """ + + def __init__(self, client, resource_link, query, options, fetch_function): + """ + Constructor + """ + super(_ProxyQueryExecutionContext, self).__init__(client, options) + + self._execution_context = _DefaultQueryExecutionContext(client, options, fetch_function) + self._resource_link = resource_link + self._query = query + self._fetch_function = fetch_function + + async def __anext__(self): + """Returns the next query result. + + :return: The next query result. + :rtype: dict + :raises StopIteration: If no more result is left. + + """ + try: + return await self._execution_context.__anext__() + except CosmosHttpResponseError as e: + if _is_partitioned_execution_info(e): + query_to_use = self._query if self._query is not None else "Select * from root r" + query_execution_info = _PartitionedQueryExecutionInfo(await self._client._GetQueryPlanThroughGateway + (query_to_use, self._resource_link)) + self._execution_context = self._create_pipelined_execution_context(query_execution_info) + else: + raise e + + return await self._execution_context.__anext__() + + async def fetch_next_block(self): + """Returns a block of results. + + This method only exists for backward compatibility reasons. (Because + QueryIterable has exposed fetch_next_block api). + + :return: List of results. + :rtype: list + """ + try: + return await self._execution_context.fetch_next_block() + except CosmosHttpResponseError as e: + if _is_partitioned_execution_info(e): #cross partition query not servable + query_to_use = self._query if self._query is not None else "Select * from root r" + query_execution_info = _PartitionedQueryExecutionInfo(await self._client._GetQueryPlanThroughGateway + (query_to_use, self._resource_link)) + self._execution_context = self._create_pipelined_execution_context(query_execution_info) + else: + raise e + + return await self._execution_context.fetch_next_block() + + def _create_pipelined_execution_context(self, query_execution_info): + + assert self._resource_link, "code bug, resource_link is required." 
+ if query_execution_info.has_aggregates() and not query_execution_info.has_select_value(): + if self._options and ("enableCrossPartitionQuery" in self._options + and self._options["enableCrossPartitionQuery"]): + raise CosmosHttpResponseError(StatusCodes.BAD_REQUEST, + "Cross partition query only supports 'VALUE ' for aggregates") + + execution_context_aggregator = multi_execution_aggregator._MultiExecutionContextAggregator(self._client, + self._resource_link, + self._query, + self._options, + query_execution_info) + return _PipelineExecutionContext(self._client, self._options, execution_context_aggregator, + query_execution_info) + + +class _PipelineExecutionContext(_QueryExecutionContextBase): # pylint: disable=abstract-method + + DEFAULT_PAGE_SIZE = 1000 + + def __init__(self, client, options, execution_context, query_execution_info): + super(_PipelineExecutionContext, self).__init__(client, options) + + if options.get("maxItemCount"): + self._page_size = options["maxItemCount"] + else: + self._page_size = _PipelineExecutionContext.DEFAULT_PAGE_SIZE + + self._execution_context = execution_context + + self._endpoint = endpoint_component._QueryExecutionEndpointComponent(execution_context) + + order_by = query_execution_info.get_order_by() + if order_by: + self._endpoint = endpoint_component._QueryExecutionOrderByEndpointComponent(self._endpoint) + + aggregates = query_execution_info.get_aggregates() + if aggregates: + self._endpoint = endpoint_component._QueryExecutionAggregateEndpointComponent(self._endpoint, aggregates) + + offset = query_execution_info.get_offset() + if offset is not None: + self._endpoint = endpoint_component._QueryExecutionOffsetEndpointComponent(self._endpoint, offset) + + top = query_execution_info.get_top() + if top is not None: + self._endpoint = endpoint_component._QueryExecutionTopEndpointComponent(self._endpoint, top) + + limit = query_execution_info.get_limit() + if limit is not None: + self._endpoint = endpoint_component._QueryExecutionTopEndpointComponent(self._endpoint, limit) + + distinct_type = query_execution_info.get_distinct_type() + if distinct_type != _DistinctType.NoneType: + if distinct_type == _DistinctType.Ordered: + self._endpoint = endpoint_component._QueryExecutionDistinctOrderedEndpointComponent(self._endpoint) + else: + self._endpoint = endpoint_component._QueryExecutionDistinctUnorderedEndpointComponent(self._endpoint) + + async def __anext__(self): + """Returns the next query result. + + :return: The next query result. + :rtype: dict + :raises StopIteration: If no more result is left. + """ + return await self._endpoint.__anext__() + + async def fetch_next_block(self): + """Returns a block of results. + + This method only exists for backward compatibility reasons. (Because + QueryIterable has exposed fetch_next_block api). + + This method internally invokes next() as many times required to collect + the requested fetch size. + + :return: List of results. 
+ :rtype: list + """ + + results = [] + for _ in xrange(self._page_size): + try: + results.append(await self.__anext__) + except StopIteration: + # no more results + break + return results \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py index 9a948f5f716d..54208bdad176 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py @@ -41,14 +41,12 @@ class _QueryExecutionEndpointComponent(object): def __init__(self, execution_context): self._execution_context = execution_context - def __iter__(self): + async def __aiter__(self): return self - def __next__(self): + async def __anext__(self): # supports python 3 iterator - return next(self._execution_context) - - next = __next__ # Python 2 compatibility. + return await self._execution_context.__anext__() class _QueryExecutionOrderByEndpointComponent(_QueryExecutionEndpointComponent): @@ -56,10 +54,8 @@ class _QueryExecutionOrderByEndpointComponent(_QueryExecutionEndpointComponent): For each processed orderby result it returns 'payload' item of the result. """ - def __next__(self): - return next(self._execution_context)["payload"] - - next = __next__ # Python 2 compatibility. + async def __anext__(self): + return await self._execution_context.__anext__()["payload"] class _QueryExecutionTopEndpointComponent(_QueryExecutionEndpointComponent): @@ -72,15 +68,13 @@ def __init__(self, execution_context, top_count): super(_QueryExecutionTopEndpointComponent, self).__init__(execution_context) self._top_count = top_count - def __next__(self): + async def __anext__(self): if self._top_count > 0: - res = next(self._execution_context) + res = await self._execution_context.__anext__() self._top_count -= 1 return res raise StopIteration - next = __next__ # Python 2 compatibility. - class _QueryExecutionDistinctOrderedEndpointComponent(_QueryExecutionEndpointComponent): """Represents an endpoint in handling distinct query. @@ -91,15 +85,13 @@ def __init__(self, execution_context): super(_QueryExecutionDistinctOrderedEndpointComponent, self).__init__(execution_context) self.last_result = None - def __next__(self): - res = next(self._execution_context) + async def __anext__(self): + res = await self._execution_context.__anext__() while self.last_result == res: - res = next(self._execution_context) + res = await self._execution_context.__anext__() self.last_result = res return res - next = __next__ # Python 2 compatibility. - class _QueryExecutionDistinctUnorderedEndpointComponent(_QueryExecutionEndpointComponent): """Represents an endpoint in handling distinct query. @@ -123,8 +115,8 @@ def make_hash(self, value): return tuple(frozenset(sorted(new_value.items()))) - def __next__(self): - res = next(self._execution_context) + async def __anext__(self): + res = await self._execution_context.__anext__() json_repr = json.dumps(self.make_hash(res)) if six.PY3: @@ -134,7 +126,7 @@ def __next__(self): hashed_result = hash_object.hexdigest() while hashed_result in self.last_result: - res = next(self._execution_context) + res = await self._execution_context.__anext__() json_repr = json.dumps(self.make_hash(res)) if six.PY3: json_repr = json_repr.encode("utf-8") @@ -144,8 +136,6 @@ def __next__(self): self.last_result.add(hashed_result) return res - next = __next__ # Python 2 compatibility. 
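The hunks above all apply the same conversion: each endpoint component's __next__ becomes an awaitable __anext__ that awaits the wrapped execution context, and an "async for" drives the chain. A minimal stand-alone sketch of that pattern follows; the class and function names are hypothetical illustrations, not SDK code.

# Minimal sketch of the sync-to-async iterator conversion used for the
# endpoint components above. Hypothetical names; not part of the SDK.
import asyncio


class AsyncTopComponent:
    """Passes through at most `top_count` items from an async iterator."""

    def __init__(self, source, top_count):
        self._source = source
        self._top_count = top_count

    def __aiter__(self):
        return self

    async def __anext__(self):
        if self._top_count > 0:
            self._top_count -= 1
            return await self._source.__anext__()
        # StopAsyncIteration, not StopIteration, is what ends an "async for".
        raise StopAsyncIteration


async def numbers():
    for value in (1, 2, 3, 4):
        yield value


async def main():
    async for item in AsyncTopComponent(numbers(), top_count=2):
        print(item)  # prints 1, then 2

asyncio.run(main())

Note that "async for" only terminates on StopAsyncIteration; the remaining raise StopIteration statements in the converted components above are tightened to StopAsyncIteration by a later commit in this series.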
- class _QueryExecutionOffsetEndpointComponent(_QueryExecutionEndpointComponent): """Represents an endpoint in handling offset query. @@ -156,16 +146,14 @@ def __init__(self, execution_context, offset_count): super(_QueryExecutionOffsetEndpointComponent, self).__init__(execution_context) self._offset_count = offset_count - def __next__(self): + async def __anext__(self): while self._offset_count > 0: - res = next(self._execution_context) + res = await self._execution_context.__anext__() if res is not None: self._offset_count -= 1 else: raise StopIteration - return next(self._execution_context) - - next = __next__ # Python 2 compatibility. + return await self._execution_context.__anext__() class _QueryExecutionAggregateEndpointComponent(_QueryExecutionEndpointComponent): @@ -191,9 +179,9 @@ def __init__(self, execution_context, aggregate_operators): elif operator == "Sum": self._local_aggregators.append(_SumAggregator()) - def __next__(self): - for res in self._execution_context: - for item in res: + async def __anext__(self): + async for res in self._execution_context: + for item in res: #TODO check on this being an async loop for operator in self._local_aggregators: if isinstance(item, dict) and item: operator.aggregate(item["item"]) @@ -208,5 +196,3 @@ def __next__(self): self._result_index += 1 return res raise StopIteration - - next = __next__ # Python 2 compatibility. diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py index 20fbfd649f1e..fe292174dbe7 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py @@ -31,7 +31,7 @@ from .. import exceptions from .. import http_constants -from . import _retry_utility +from . import _retry_utility_async from .._synchronized_request import _request_body_from_data @@ -174,7 +174,7 @@ async def AsynchronousRequest( request.headers[http_constants.HttpHeaders.ContentLength] = 0 # Pass _Request function with it's parameters to retry_utility's Execute method that wraps the call with retries - return await _retry_utility.ExecuteAsync( + return await _retry_utility_async.ExecuteAsync( client, global_endpoint_manager, _Request, diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py index 1747d49e7aec..6b6575fd1f67 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py @@ -28,7 +28,6 @@ # Currently pylint is locked to 2.3.3 and this is fixed in 2.4.4 from typing import Dict, Any, Optional # pylint: disable=unused-import import six -import asyncio from urllib3.util.retry import Retry from azure.core.async_paging import AsyncItemPaged from azure.core import AsyncPipelineClient @@ -49,13 +48,13 @@ from ..documents import ConnectionPolicy from .. import _constants as constants from .. import http_constants -from .. import _query_iterable as query_iterable +from . import _query_iterable_async as query_iterable from .. import _runtime_constants as runtime_constants from .. import _request_object from . import _asynchronous_request as asynchronous_request from . 
import _global_endpoint_manager_async as global_endpoint_manager_async from .._routing import routing_map_provider -from ._retry_utility import ConnectionRetryPolicy +from ._retry_utility_async import ConnectionRetryPolicy from .. import _session from .. import _utils from ..partition_key import _Undefined, _Empty @@ -727,6 +726,24 @@ async def ReplaceItem(self, document_link, new_document, options=None, **kwargs) return await self.Replace(new_document, path, "docs", document_id, None, options, **kwargs) + async def ReplaceOffer(self, offer_link, offer, **kwargs): + """Replaces an offer and returns it. + + :param str offer_link: + The link to the offer. + :param dict offer: + + :return: + The replaced Offer. + :rtype: + dict + + """ + CosmosClientConnection.__ValidateResource(offer) + path = base.GetPathFromLink(offer_link) + offer_id = base.GetResourceIdOrFullNameFromLink(offer_link) + return await self.Replace(offer, path, "offers", offer_id, None, None, **kwargs) + async def Replace(self, resource, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin """Replaces a Azure Cosmos resource and returns it. @@ -944,6 +961,388 @@ async def __Delete(self, path, request_params, req_headers, **kwargs): **kwargs ) + def ReadItems(self, collection_link, feed_options=None, response_hook=None, **kwargs): + """Reads all documents in a collection. + + :param str collection_link: + The link to the document collection. + :param dict feed_options: + + :return: + Query Iterable of Documents. + :rtype: + query_iterable.QueryIterable + + """ + if feed_options is None: + feed_options = {} + + return self.QueryItems(collection_link, None, feed_options, response_hook=response_hook, **kwargs) + + def QueryItems( + self, + database_or_container_link, + query, + options=None, + partition_key=None, + response_hook=None, + **kwargs + ): + """Queries documents in a collection. + + :param str database_or_container_link: + The link to the database when using partitioning, otherwise link to the document collection. + :param (str or dict) query: + :param dict options: + The request options for the request. + :param str partition_key: + Partition key for the query(default value None) + :param response_hook: + A callable invoked with the response metadata + + :return: + Query Iterable of Documents. + :rtype: + query_iterable.QueryIterable + + """ + database_or_container_link = base.TrimBeginningAndEndingSlashes(database_or_container_link) + + if options is None: + options = {} + + if base.IsDatabaseLink(database_or_container_link): + return AsyncItemPaged( + self, + query, + options, + database_link=database_or_container_link, + partition_key=partition_key, + page_iterator_class=query_iterable.QueryIterable + ) + + path = base.GetPathFromLink(database_or_container_link, "docs") + collection_id = base.GetResourceIdOrFullNameFromLink(database_or_container_link) + + async def fetch_fn(options): + return ( + await self.__QueryFeed( + path, + "docs", + collection_id, + lambda r: r["Documents"], + lambda _, b: b, + query, + options, + response_hook=response_hook, + **kwargs + ), + self.last_response_headers, + ) + + return AsyncItemPaged( + self, + query, + options, + fetch_function=fetch_fn, + collection_link=database_or_container_link, + page_iterator_class=query_iterable.QueryIterable + ) + + def QueryItemsChangeFeed(self, collection_link, options=None, response_hook=None, **kwargs): + """Queries documents change feed in a collection. 
+ + :param str collection_link: + The link to the document collection. + :param dict options: + The request options for the request. + options may also specify partition key range id. + :param response_hook: + A callable invoked with the response metadata + + :return: + Query Iterable of Documents. + :rtype: + query_iterable.QueryIterable + + """ + + partition_key_range_id = None + if options is not None and "partitionKeyRangeId" in options: + partition_key_range_id = options["partitionKeyRangeId"] + + return self._QueryChangeFeed( + collection_link, "Documents", options, partition_key_range_id, response_hook=response_hook, **kwargs + ) + + def _QueryChangeFeed( + self, collection_link, resource_type, options=None, partition_key_range_id=None, response_hook=None, **kwargs + ): + """Queries change feed of a resource in a collection. + + :param str collection_link: + The link to the document collection. + :param str resource_type: + The type of the resource. + :param dict options: + The request options for the request. + :param str partition_key_range_id: + Specifies partition key range id. + :param response_hook: + A callable invoked with the response metadata + + :return: + Query Iterable of Documents. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + options["changeFeed"] = True + + resource_key_map = {"Documents": "docs"} + + # For now, change feed only supports Documents and Partition Key Range resouce type + if resource_type not in resource_key_map: + raise NotImplementedError(resource_type + " change feed query is not supported.") + + resource_key = resource_key_map[resource_type] + path = base.GetPathFromLink(collection_link, resource_key) + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + + async def fetch_fn(options): + return ( + await self.__QueryFeed( + path, + resource_key, + collection_id, + lambda r: r[resource_type], + lambda _, b: b, + None, + options, + partition_key_range_id, + response_hook=response_hook, + **kwargs + ), + self.last_response_headers, + ) + + return AsyncItemPaged( + self, + None, + options, + fetch_function=fetch_fn, + collection_link=collection_link, + page_iterator_class=query_iterable.QueryIterable + ) + + def QueryOffers(self, query, options=None, **kwargs): + """Query for all offers. + + :param (str or dict) query: + :param dict options: + The request options for the request + + :return: + Query Iterable of Offers. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + async def fetch_fn(options): + return ( + await self.__QueryFeed( + "/offers", "offers", "", lambda r: r["Offers"], lambda _, b: b, query, options, **kwargs + ), + self.last_response_headers, + ) + + return AsyncItemPaged( + self, + query, + options, + fetch_function=fetch_fn, + page_iterator_class=query_iterable.QueryIterable + ) + + def ReadConflicts(self, collection_link, feed_options=None, **kwargs): + """Reads conflicts. + + :param str collection_link: + The link to the document collection. + :param dict feed_options: + + :return: + Query Iterable of Conflicts. + :rtype: + query_iterable.QueryIterable + + """ + if feed_options is None: + feed_options = {} + + return self.QueryConflicts(collection_link, None, feed_options, **kwargs) + + def QueryConflicts(self, collection_link, query, options=None, **kwargs): + """Queries conflicts in a collection. + + :param str collection_link: + The link to the document collection. 
+ :param (str or dict) query: + :param dict options: + The request options for the request. + + :return: + Query Iterable of Conflicts. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(collection_link, "conflicts") + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + + async def fetch_fn(options): + return ( + await self.__QueryFeed( + path, "conflicts", collection_id, lambda r: r["Conflicts"], + lambda _, b: b, query, options, **kwargs + ), + self.last_response_headers, + ) + + return AsyncItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) + + async def __QueryFeed( + self, + path, + typ, + id_, + result_fn, + create_fn, + query, + options=None, + partition_key_range_id=None, + response_hook=None, + is_query_plan=False, + **kwargs + ): + """Query for more than one Azure Cosmos resources. + + :param str path: + :param str typ: + :param str id_: + :param function result_fn: + :param function create_fn: + :param (str or dict) query: + :param dict options: + The request options for the request. + :param str partition_key_range_id: + Specifies partition key range id. + :param function response_hook: + :param bool is_query_plan: + Specififes if the call is to fetch query plan + + :rtype: + list + + :raises SystemError: If the query compatibility mode is undefined. + + """ + if options is None: + options = {} + + if query: + __GetBodiesFromQueryResult = result_fn + else: + + def __GetBodiesFromQueryResult(result): + if result is not None: + return [create_fn(self, body) for body in result_fn(result)] + # If there is no change feed, the result data is empty and result is None. + # This case should be interpreted as an empty array. + return [] + + initial_headers = self.default_headers.copy() + # Copy to make sure that default_headers won't be changed. 
+ if query is None: + # Query operations will use ReadEndpoint even though it uses GET(for feed requests) + request_params = _request_object.RequestObject(typ, + documents._OperationType.QueryPlan if is_query_plan else documents._OperationType.ReadFeed) + headers = base.GetHeaders(self, initial_headers, "get", path, id_, typ, options, partition_key_range_id) + result, self.last_response_headers = await self.__Get(path, request_params, headers, **kwargs) + if response_hook: + response_hook(self.last_response_headers, result) + return __GetBodiesFromQueryResult(result) + + query = self.__CheckAndUnifyQueryFormat(query) + + initial_headers[http_constants.HttpHeaders.IsQuery] = "true" + if not is_query_plan: + initial_headers[http_constants.HttpHeaders.IsQuery] = "true" + + if ( + self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.Default + or self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.Query + ): + initial_headers[http_constants.HttpHeaders.ContentType] = runtime_constants.MediaTypes.QueryJson + elif self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.SqlQuery: + initial_headers[http_constants.HttpHeaders.ContentType] = runtime_constants.MediaTypes.SQL + else: + raise SystemError("Unexpected query compatibility mode.") + + # Query operations will use ReadEndpoint even though it uses POST(for regular query operations) + request_params = _request_object.RequestObject(typ, documents._OperationType.SqlQuery) + req_headers = base.GetHeaders(self, initial_headers, "post", path, id_, typ, options, partition_key_range_id) + result, self.last_response_headers = await self.__Post(path, request_params, query, req_headers, **kwargs) + + if response_hook: + response_hook(self.last_response_headers, result) + + return __GetBodiesFromQueryResult(result) + + def __CheckAndUnifyQueryFormat(self, query_body): + """Checks and unifies the format of the query body. + + :raises TypeError: If query_body is not of expected type (depending on the query compatibility mode). + :raises ValueError: If query_body is a dict but doesn\'t have valid query text. + :raises SystemError: If the query compatibility mode is undefined. + + :param (str or dict) query_body: + + :return: + The formatted query body. + :rtype: + dict or string + """ + if ( + self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.Default + or self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.Query + ): + if not isinstance(query_body, dict) and not isinstance(query_body, six.string_types): + raise TypeError("query body must be a dict or string.") + if isinstance(query_body, dict) and not query_body.get("query"): + raise ValueError('query body must have valid query text with key "query".') + if isinstance(query_body, six.string_types): + return {"query": query_body} + elif ( + self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.SqlQuery + and not isinstance(query_body, six.string_types) + ): + raise TypeError("query body must be a string.") + else: + raise SystemError("Unexpected query compatibility mode.") + + return query_body + def _UpdateSessionIfRequired(self, request_headers, response_result, response_headers): """ Updates session if necessary. 
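The __CheckAndUnifyQueryFormat helper above normalizes whatever the caller passes into the JSON body that the query POST expects. A small stand-alone sketch of that normalization under the default query compatibility mode follows; the helper name is hypothetical, and plain str stands in for six.string_types.

# Sketch of the query-body normalization performed above under the default
# query compatibility mode. Hypothetical helper name; the SDK method also
# handles the SqlQuery compatibility mode.
def normalize_query_body(query_body):
    if isinstance(query_body, dict):
        if not query_body.get("query"):
            raise ValueError('query body must have valid query text with key "query".')
        return query_body
    if isinstance(query_body, str):
        return {"query": query_body}
    raise TypeError("query body must be a dict or string.")


assert normalize_query_body("SELECT * FROM c") == {"query": "SELECT * FROM c"}
assert normalize_query_body({"query": "SELECT * FROM c WHERE c.id = @id",
                             "parameters": [{"name": "@id", "value": "item-1"}]})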
@@ -1097,6 +1496,36 @@ def _retrieve_partition_key(self, partition_key_parts, document, is_system_key): return partitionKey + async def _GetQueryPlanThroughGateway(self, query, resource_link, **kwargs): + supported_query_features = (documents._QueryFeature.Aggregate + "," + + documents._QueryFeature.CompositeAggregate + "," + + documents._QueryFeature.Distinct + "," + + documents._QueryFeature.MultipleOrderBy + "," + + documents._QueryFeature.OffsetAndLimit + "," + + documents._QueryFeature.OrderBy + "," + + documents._QueryFeature.Top) + + options = { + "contentType": runtime_constants.MediaTypes.Json, + "isQueryPlanRequest": True, + "supportedQueryFeatures": supported_query_features, + "queryVersion": http_constants.Versions.QueryVersion + } + + resource_link = base.TrimBeginningAndEndingSlashes(resource_link) + path = base.GetPathFromLink(resource_link, "docs") + resource_id = base.GetResourceIdOrFullNameFromLink(resource_link) + + return await self.__QueryFeed(path, + "docs", + resource_id, + lambda r: r, + None, + query, + options, + is_query_plan=True, + **kwargs) + @staticmethod def _return_undefined_or_empty_partition_key(is_system_key): if is_system_key: diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_query_iterable_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_query_iterable_async.py new file mode 100644 index 000000000000..4a3554154963 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_query_iterable_async.py @@ -0,0 +1,101 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Iterable query results in the Azure Cosmos database service. +""" +from azure.core.async_paging import AsyncPageIterator +from azure.cosmos._execution_context.aio import execution_dispatcher_async + +# pylint: disable=protected-access + + +class QueryIterable(AsyncPageIterator): + """Represents an iterable object of the query results. + + QueryIterable is a wrapper for query execution context. + """ + + def __init__( + self, + client, + query, + options, + fetch_function=None, + collection_link=None, + database_link=None, + partition_key=None, + continuation_token=None, + ): + """Instantiates a QueryIterable for non-client side partitioning queries. + + _ProxyQueryExecutionContext will be used as the internal query execution + context. + + :param CosmosClient client: Instance of document client. + :param (str or dict) query: + :param dict options: The request options for the request. 
+ :param method fetch_function: + :param method resource_type: The type of the resource being queried + :param str resource_link: If this is a Document query/feed collection_link is required. + + Example of `fetch_function`: + + >>> def result_fn(result): + >>> return result['Databases'] + + """ + self._client = client + self.retry_options = client.connection_policy.RetryOptions + self._query = query + self._options = options + if continuation_token: + options['continuation'] = continuation_token + self._fetch_function = fetch_function + self._collection_link = collection_link + self._database_link = database_link + self._partition_key = partition_key + self._ex_context = execution_dispatcher_async._ProxyQueryExecutionContext( + self._client, self._collection_link, self._query, self._options, self._fetch_function + ) + super(QueryIterable, self).__init__(self._fetch_next, self._unpack, continuation_token=continuation_token) + + async def _unpack(self, block): + continuation = None + if self._client.last_response_headers: + continuation = self._client.last_response_headers.get("x-ms-continuation") or \ + self._client.last_response_headers.get('etag') + if block: + self._did_a_call_already = False + return continuation, block + + async def _fetch_next(self, *args): # pylint: disable=unused-argument + """Return a block of results with respecting retry policy. + + This method only exists for backward compatibility reasons. (Because + QueryIterable has exposed fetch_next_block api). + + :return: List of results. + :rtype: list + """ + block = await self._ex_context.fetch_next_block() + if not block: + raise StopAsyncIteration + return block diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility_async.py similarity index 100% rename from sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility_async.py diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py index 92f35704de11..bdf212fea155 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py @@ -22,11 +22,10 @@ """Create, read, update and delete items in the Azure Cosmos DB SQL API service. """ -from typing import Any, Dict, List, Optional, Union, Iterable, cast # pylint: disable=unused-import +from typing import Any, Dict, List, Optional, Union, Iterable, cast import six -import asyncio -import time +from azure.core.tracing.decorator import distributed_trace # pylint: disable=unused-import from azure.core.tracing.decorator_async import distributed_trace_async # type: ignore from ._cosmos_client_connection_async import CosmosClientConnection @@ -43,7 +42,7 @@ # pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs # Missing container query methods: -# query_items(), query_items_change_feed(), read_all_items(), query_conflicts(), list_conflicts(), replace_throughput(), read_offer() +# query_conflicts(), list_conflicts(), replace_throughput(), read_offer() class ContainerProxy(object): """An interface to interact with a specific DB Container. 
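The fetch_function handed to the async QueryIterable above is an awaitable callable that takes the per-request options and returns one page of results plus the response headers; _unpack then pulls the continuation token out of those headers so the next call resumes where the last one stopped. A stand-alone sketch of that contract follows; every name and the canned PAGES data are hypothetical, and a real fetch function issues an HTTP request per page.

# Stand-alone sketch of the fetch_function/continuation contract used by the
# async QueryIterable. Hypothetical names and canned data only.
import asyncio

PAGES = {
    None: ([{"id": "a"}, {"id": "b"}], "token-1"),
    "token-1": ([{"id": "c"}], None),
}


async def fetch_fn(options):
    items, continuation = PAGES[options.get("continuation")]
    return items, {"x-ms-continuation": continuation}


async def read_all_pages():
    options, results = {}, []
    while True:
        items, headers = await fetch_fn(options)
        results.extend(items)
        continuation = headers.get("x-ms-continuation")
        if not continuation:
            return results
        options["continuation"] = continuation


print(asyncio.run(read_all_pages()))  # [{'id': 'a'}, {'id': 'b'}, {'id': 'c'}]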
@@ -248,6 +247,172 @@ async def read_item( response_hook(self.client_connection.last_response_headers, result) return result + @distributed_trace + def read_all_items( + self, + max_item_count=None, # type: Optional[int] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any + ): + # type: (...) -> Iterable[Dict[str, Any]] + """List all the items in the container. + + :param max_item_count: Max number of items to be returned in the enumeration operation. + :param populate_query_metrics: Enable returning query metrics in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: An Iterable of items (dicts). + :rtype: Iterable[dict[str, Any]] + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + if populate_query_metrics is not None: + feed_options["populateQueryMetrics"] = populate_query_metrics + + if hasattr(response_hook, "clear"): + response_hook.clear() + + items = self.client_connection.ReadItems( + collection_link=self.container_link, feed_options=feed_options, response_hook=response_hook, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, items) + return items + + @distributed_trace + def query_items( + self, + query, # type: str + parameters=None, # type: Optional[List[Dict[str, object]]] + partition_key=None, # type: Optional[Any] + enable_cross_partition_query=None, # type: Optional[bool] + max_item_count=None, # type: Optional[int] + enable_scan_in_query=None, # type: Optional[bool] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any + ): + # type: (...) -> Iterable[Dict[str, Any]] + """Return all results matching the given `query`. + + You can use any value for the container name in the FROM clause, but + often the container name is used. In the examples below, the container + name is "products," and is aliased as "p" for easier referencing in + the WHERE clause. + + :param query: The Azure Cosmos DB SQL query to execute. + :param parameters: Optional array of parameters to the query. + Each parameter is a dict() with 'name' and 'value' keys. + Ignored if no query is provided. + :param partition_key: Specifies the partition key value for the item. + :param enable_cross_partition_query: Allows sending of more than one request to + execute the query in the Azure Cosmos DB service. + More than one request is necessary if the query is not scoped to single partition key value. + :param max_item_count: Max number of items to be returned in the enumeration operation. + :param enable_scan_in_query: Allow scan on the queries which couldn't be served as + indexing was opted out on the requested paths. + :param populate_query_metrics: Enable returning query metrics in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: An Iterable of items (dicts). + :rtype: Iterable[dict[str, Any]] + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/examples.py + :start-after: [START query_items] + :end-before: [END query_items] + :language: python + :dedent: 0 + :caption: Get all products that have not been discontinued: + :name: query_items + + .. literalinclude:: ../samples/examples.py + :start-after: [START query_items_param] + :end-before: [END query_items_param] + :language: python + :dedent: 0 + :caption: Parameterized query to get all products that have been discontinued: + :name: query_items_param + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if enable_cross_partition_query is not None: + feed_options["enableCrossPartitionQuery"] = enable_cross_partition_query + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + if populate_query_metrics is not None: + feed_options["populateQueryMetrics"] = populate_query_metrics + if partition_key is not None: + feed_options["partitionKey"] = self._set_partition_key(partition_key) + if enable_scan_in_query is not None: + feed_options["enableScanInQuery"] = enable_scan_in_query + + if hasattr(response_hook, "clear"): + response_hook.clear() + + items = self.client_connection.QueryItems( + database_or_container_link=self.container_link, + query=query if parameters is None else dict(query=query, parameters=parameters), + options=feed_options, + partition_key=partition_key, + response_hook=response_hook, + **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, items) + return items + + @distributed_trace + def query_items_change_feed( + self, + partition_key_range_id=None, # type: Optional[str] + is_start_from_beginning=False, # type: bool + continuation=None, # type: Optional[str] + max_item_count=None, # type: Optional[int] + **kwargs # type: Any + ): + # type: (...) -> Iterable[Dict[str, Any]] + """Get a sorted list of items that were changed, in the order in which they were modified. + + :param partition_key_range_id: ChangeFeed requests can be executed against specific partition key ranges. + This is used to process the change feed in parallel across multiple consumers. + :param partition_key: partition key at which ChangeFeed requests are targetted. + :param is_start_from_beginning: Get whether change feed should start from + beginning (true) or from current (false). By default it's start from current (false). + :param continuation: e_tag value to be used as continuation for reading change feed. + :param max_item_count: Max number of items to be returned in the enumeration operation. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: An Iterable of items (dicts). 
+ :rtype: Iterable[dict[str, Any]] + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if partition_key_range_id is not None: + feed_options["partitionKeyRangeId"] = partition_key_range_id + partition_key = kwargs.pop("partitionKey", None) + if partition_key is not None: + feed_options["partitionKey"] = partition_key + if is_start_from_beginning is not None: + feed_options["isStartFromBeginning"] = is_start_from_beginning + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + if continuation is not None: + feed_options["continuation"] = continuation + + if hasattr(response_hook, "clear"): + response_hook.clear() + + result = self.client_connection.QueryItemsChangeFeed( + self.container_link, options=feed_options, response_hook=response_hook, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + @distributed_trace_async async def upsert_item( self, @@ -392,6 +557,159 @@ async def delete_item( if response_hook: response_hook(self.client_connection.last_response_headers, result) + @distributed_trace_async + async def read_offer(self, **kwargs): + # type: (Any) -> Offer + """Read the Offer object for this container. + + If no Offer already exists for the container, an exception is raised. + + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: Offer for the container. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: No offer exists for the container or + the offer could not be retrieved. + :rtype: ~azure.cosmos.Offer + """ + response_hook = kwargs.pop('response_hook', None) + properties = await self._get_properties() + link = properties["_self"] + query_spec = { + "query": "SELECT * FROM root r WHERE r.resource=@link", + "parameters": [{"name": "@link", "value": link}], + } + offers = self.client_connection.QueryOffers(query_spec, **kwargs) + if not offers: + raise CosmosResourceNotFoundError( + status_code=StatusCodes.NOT_FOUND, + message="Could not find Offer for container " + self.container_link) + + throughput, curr_offer = None, None + async for offer in offers: + if not offer: + raise CosmosResourceNotFoundError( + status_code=StatusCodes.NOT_FOUND, + message="Could not find Offer for container " + self.container_link) + else: + throughput = offer["content"]["offerThroughput"] + curr_offer = offer + StopAsyncIteration + + if response_hook: + response_hook(self.client_connection.last_response_headers, offers) + + return Offer(offer_throughput=throughput, properties=curr_offer) + + @distributed_trace_async + async def replace_throughput(self, throughput, **kwargs): + # type: (int, Any) -> Offer + """Replace the container's throughput. + + If no Offer already exists for the container, an exception is raised. + + :param throughput: The throughput to be set (an integer). + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: Offer for the container, updated with new throughput. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: No offer exists for the container + or the offer could not be updated. 
+ :rtype: ~azure.cosmos.Offer + """ + response_hook = kwargs.pop('response_hook', None) + properties = await self._get_properties() + link = properties["_self"] + query_spec = { + "query": "SELECT * FROM root r WHERE r.resource=@link", + "parameters": [{"name": "@link", "value": link}], + } + offers = self.client_connection.QueryOffers(query_spec, **kwargs) + if not offers: + raise CosmosResourceNotFoundError( + status_code=StatusCodes.NOT_FOUND, + message="Could not find Offer for container " + self.container_link) + + curr_offer = None + async for offer in offers: + if not offer: + raise CosmosResourceNotFoundError( + status_code=StatusCodes.NOT_FOUND, + message="Could not find Offer for container " + self.container_link) + else: + curr_offer = offer + StopAsyncIteration + + new_offer = curr_offer.copy() + new_offer["content"]["offerThroughput"] = throughput + data = await self.client_connection.ReplaceOffer(offer_link=curr_offer["_self"], offer=curr_offer, **kwargs) + + if response_hook: + response_hook(self.client_connection.last_response_headers, data) + + return Offer(offer_throughput=data["content"]["offerThroughput"], properties=data) + + @distributed_trace + def list_conflicts(self, max_item_count=None, **kwargs): + # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + """List all the conflicts in the container. + + :param max_item_count: Max number of items to be returned in the enumeration operation. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: An Iterable of conflicts (dicts). + :rtype: Iterable[dict[str, Any]] + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + + result = self.client_connection.ReadConflicts( + collection_link=self.container_link, feed_options=feed_options, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace + def query_conflicts( + self, + query, # type: str + parameters=None, # type: Optional[List[str]] + enable_cross_partition_query=None, # type: Optional[bool] + partition_key=None, # type: Optional[Any] + max_item_count=None, # type: Optional[int] + **kwargs # type: Any + ): + # type: (...) -> Iterable[Dict[str, Any]] + """Return all conflicts matching a given `query`. + + :param query: The Azure Cosmos DB SQL query to execute. + :param parameters: Optional array of parameters to the query. Ignored if no query is provided. + :param enable_cross_partition_query: Allows sending of more than one request to execute + the query in the Azure Cosmos DB service. + More than one request is necessary if the query is not scoped to single partition key value. + :param partition_key: Specifies the partition key value for the item. + :param max_item_count: Max number of items to be returned in the enumeration operation. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: An Iterable of conflicts (dicts). 
+ :rtype: Iterable[dict[str, Any]] + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + if enable_cross_partition_query is not None: + feed_options["enableCrossPartitionQuery"] = enable_cross_partition_query + if partition_key is not None: + feed_options["partitionKey"] = self._set_partition_key(partition_key) + + result = self.client_connection.QueryConflicts( + collection_link=self.container_link, + query=query if parameters is None else dict(query=query, parameters=parameters), + options=feed_options, + **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + @distributed_trace_async async def get_conflict(self, conflict, partition_key, **kwargs): # type: (Union[str, Dict[str, Any]], Any, Any) -> Dict[str, str] diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py index 2261a430c75b..06bf9e78827c 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py @@ -33,7 +33,7 @@ from ..cosmos_client import _parse_connection_str, _build_auth from ._cosmos_client_connection_async import CosmosClientConnection from .._base import build_options -from ._retry_utility import ConnectionRetryPolicy +from ._retry_utility_async import ConnectionRetryPolicy from .database import DatabaseProxy from ..documents import ConnectionPolicy, DatabaseAccount from ..exceptions import CosmosResourceNotFoundError diff --git a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py index a486a0364a47..94fc65b7d936 100644 --- a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py +++ b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py @@ -35,50 +35,62 @@ async def async_crud_test(): async with AsyncClient(endpoint, key) as client: db = await client.create_database(db_name) print("Created DB, now reading and attempting create_if_not_exist") + await db.read() db = await client.create_database_if_not_exists(db_name) print("Create if not exist had no problems, deleting DB now") + await client.delete_database(db_name) print("DB deleted, now attempting read") try: await db.read() except: print("Error returned successfully for reading DB") + print("Re-creating DB for testing container methods") db = await client.create_database_if_not_exists(db_name) cont = await db.create_container(id=cont_name, partition_key=PartitionKey(path="/lastName")) - print("Created CONT, now reading and attempting create_if_not_exists") + print("Created container, now reading and attempting create_if_not_exists") + c = await cont.read() cont = await db.create_container_if_not_exists(id=cont_name, partition_key=PartitionKey(path="/lastName")) - print("Create if not exist had no problems, replacing and deleting CONT now") + print("Create if not exist had no problems, replacing and deleting container now") + assert c.get('defaultTtl') is None await db.replace_container(container=cont_name, partition_key=PartitionKey(path='/lastName'), default_ttl=ttl) c = await cont.read() assert c.get('defaultTtl') == 200 - print("CONT properties changed, now deleting") + print("Container properties changed, now deleting") + await db.delete_container(cont_name) - print("CONT deleted, now attempting read") + print("Container deleted, now attempting read") try: await cont.read() except: 
print("Error returned succesfully") - print("Re-creating CONT for testing item methods") + + print("Re-creating container for testing item methods") cont = await db.create_container_if_not_exists(id=cont_name, partition_key=PartitionKey(path="/lastName")) + body1 = get_test_item() await cont.create_item(body=body1) print("Created item, now reading and then upserting/replacing") + body2 = get_test_item() await cont.upsert_item(body=body1) # Check here for read all items and verify there is still only 1 left after upsert await cont.replace_item(item=body1["id"], body=body2) print("Item replaced, now attempting read") + try: await cont.read_item(item=body1.get("id"), partition_key=body1.get("lastName")) except: print("Error returned succesfully, reading and deleting replaced item now") + await cont.read_item(item=body2.get("id"), partition_key=body2.get("lastName")) await cont.delete_item(item=body2.get("id"), partition_key=body2.get("lastName")) print("Item deleted, now attempting read") + try: await cont.read_item(item=body2.get("id"), partition_key=body2.get("lastName")) except: @@ -180,15 +192,51 @@ def user_test(): perms = u.list_permissions() print(list(perms)) -def wrong_test(): - # client = SyncClient(endpoint, key) - # db = client.get_database_client("db111") - # cont = db.get_container_client("c111") - # cont.read() - # id = "Async_cc4b235e-ce8e-4b4f-835d-3c29182f0639" - # cont.read_item(item="wow", partition_key=id) - client = SyncClient.from_connection_string("") - print(list(client.list_databases())) +async def qta(): + async with AsyncClient(endpoint, key) as client: + db = await client.create_database_if_not_exists("qta") + cont = await db.create_container_if_not_exists(id="qtac", partition_key=PartitionKey(path="/id")) + itemId = "Async_e402afa6-badf-43f2-8ddd-83776221cb3a" + print("attempting query") + + y = await cont.read_offer() + print(type(y)) + print(y) + print(y.properties) + print(y.offer_throughput) + + print("replacing") + x = await cont.replace_throughput(throughput=400) + print(type(x)) + print(x.properties) + print(x.offer_throughput) + + z = cont.list_conflicts() + print(type(z)) + print(z) + + # query = "SELECT * FROM c WHERE c.id=@id" + # items = cont.query_items( + # query=query, + # parameters=[{"name":"@id", "value": itemId}], + # enable_cross_partition_query=True) + + # async for item in items: + # print(item) + + + # x = cont.read_all_items() + # #async for item in items + # # + # async for item in x: + # print(item) + +def qt(): + client = SyncClient(endpoint, key) + db = client.create_database_if_not_exists(id="qt") + container = db.create_container_if_not_exists( + id="qtc", + partition_key=PartitionKey(path="/id")) # async def read_all(): # async with AsyncClient(endpoint, key) as client: @@ -202,7 +250,9 @@ def wrong_test(): async def main(): # await read_tests() - await async_crud_test() + # await async_crud_test() + await qta() + qt() if __name__ == "__main__": From 72de7c849d0360a3ef59415ef107fcbfd2274116 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Thu, 21 Oct 2021 17:21:30 -0400 Subject: [PATCH 19/56] missing some execution logic and tests --- sdk/cosmos/azure-cosmos/README.md | 2 +- .../azure/cosmos/_cosmos_client_connection.py | 1 + .../cosmos/_execution_context/aio/__init__.py | 20 + ...ext_async.py => base_execution_context.py} | 8 +- ...tcher_async.py => execution_dispatcher.py} | 4 +- .../aio/multi_execution_aggregator.py | 159 +++++ .../_execution_context/endpoint_component.py | 6 +- .../azure/cosmos/_retry_utility.py | 2 +- 
.../azure/cosmos/_routing/aio/__init__.py | 20 + .../_routing/aio/routing_map_provider.py | 193 +++++ .../azure-cosmos/azure/cosmos/aio/__init__.py | 36 + .../azure/cosmos/aio/_asynchronous_request.py | 2 +- .../aio/_cosmos_client_connection_async.py | 548 +++++++++++++- .../aio/_global_endpoint_manager_async.py | 2 +- .../azure/cosmos/aio/_query_iterable_async.py | 4 +- .../azure/cosmos/aio/_retry_utility_async.py | 2 +- .../azure/cosmos/aio/container.py | 44 +- .../azure/cosmos/aio/cosmos_client.py | 78 +- .../azure-cosmos/azure/cosmos/aio/database.py | 202 +++++- .../azure-cosmos/azure/cosmos/aio/scripts.py | 222 +++--- .../azure-cosmos/azure/cosmos/aio/user.py | 299 ++++++++ ...access_cosmos_with_resource_token_async.py | 248 +++++++ .../samples/change_feed_management.py | 1 - .../samples/change_feed_management_async.py | 99 +++ .../samples/container_management.py | 1 + .../samples/container_management_async.py | 317 +++++++++ .../samples/database_management.py | 1 + .../samples/database_management_async.py | 150 ++++ .../samples/document_management_async.py | 208 ++++++ .../azure-cosmos/samples/index_management.py | 6 +- .../samples/index_management_async.py | 667 ++++++++++++++++++ .../azure-cosmos/samples/simon_testfile.py | 122 +++- 32 files changed, 3467 insertions(+), 207 deletions(-) create mode 100644 sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/__init__.py rename sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/{base_execution_context_async.py => base_execution_context.py} (97%) rename sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/{execution_dispatcher_async.py => execution_dispatcher.py} (97%) create mode 100644 sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/multi_execution_aggregator.py create mode 100644 sdk/cosmos/azure-cosmos/azure/cosmos/_routing/aio/__init__.py create mode 100644 sdk/cosmos/azure-cosmos/azure/cosmos/_routing/aio/routing_map_provider.py create mode 100644 sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py create mode 100644 sdk/cosmos/azure-cosmos/samples/access_cosmos_with_resource_token_async.py create mode 100644 sdk/cosmos/azure-cosmos/samples/change_feed_management_async.py create mode 100644 sdk/cosmos/azure-cosmos/samples/container_management_async.py create mode 100644 sdk/cosmos/azure-cosmos/samples/database_management_async.py create mode 100644 sdk/cosmos/azure-cosmos/samples/document_management_async.py create mode 100644 sdk/cosmos/azure-cosmos/samples/index_management_async.py diff --git a/sdk/cosmos/azure-cosmos/README.md b/sdk/cosmos/azure-cosmos/README.md index 88a8232cdc0c..a6b16d8c7852 100644 --- a/sdk/cosmos/azure-cosmos/README.md +++ b/sdk/cosmos/azure-cosmos/README.md @@ -97,7 +97,7 @@ Currently the features below are **not supported**. 
For alternatives options, ch ### Data Plane Limitations: * Group By queries -* Language Native async i/o +* Language Native async i/o (NO LONGER A LIMITATION) * Queries with COUNT from a DISTINCT subquery: SELECT COUNT (1) FROM (SELECT DISTINCT C.ID FROM C) * Bulk/Transactional batch processing * Direct TCP Mode access diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py index fabe08cf9161..5095be182ecb 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py @@ -2480,6 +2480,7 @@ def __CheckAndUnifyQueryFormat(self, query_body): @staticmethod def __ValidateResource(resource): + print(resource) id_ = resource.get("id") if id_: try: diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/__init__.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/__init__.py new file mode 100644 index 000000000000..f5373937e446 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/__init__.py @@ -0,0 +1,20 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/base_execution_context_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/base_execution_context.py similarity index 97% rename from sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/base_execution_context_async.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/base_execution_context.py index c26eedbe1740..d097ebd5f8b0 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/base_execution_context_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/base_execution_context.py @@ -72,7 +72,7 @@ async def fetch_next_block(self): if not self._has_more_pages(): return [] - if self._buffer: #might need async version of this + if self._buffer: # if there is anything in the buffer returns that res = list(self._buffer) self._buffer.clear() @@ -93,10 +93,10 @@ async def __anext__(self): :return: The next query result. :rtype: dict - :raises StopIteration: If no more result is left. + :raises StopAsyncIteration: If no more result is left. 
""" if self._has_finished: - raise StopIteration + raise StopAsyncIteration if not self._buffer: @@ -104,7 +104,7 @@ async def __anext__(self): self._buffer.extend(results) if not self._buffer: - raise StopIteration + raise StopAsyncIteration return self._buffer.popleft() diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/execution_dispatcher_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/execution_dispatcher.py similarity index 97% rename from sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/execution_dispatcher_async.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/execution_dispatcher.py index 5bc87d4f1138..3eb4341bf6f7 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/execution_dispatcher_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/execution_dispatcher.py @@ -26,8 +26,8 @@ from six.moves import xrange from azure.cosmos.exceptions import CosmosHttpResponseError from azure.cosmos._execution_context.execution_dispatcher import _is_partitioned_execution_info, _get_partitioned_execution_info -from azure.cosmos._execution_context import multi_execution_aggregator -from azure.cosmos._execution_context.aio.base_execution_context_async import _QueryExecutionContextBase, _DefaultQueryExecutionContext +from azure.cosmos._execution_context.aio import multi_execution_aggregator +from azure.cosmos._execution_context.aio.base_execution_context import _QueryExecutionContextBase, _DefaultQueryExecutionContext from azure.cosmos._execution_context.query_execution_info import _PartitionedQueryExecutionInfo from azure.cosmos._execution_context import endpoint_component from azure.cosmos.documents import _DistinctType diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/multi_execution_aggregator.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/multi_execution_aggregator.py new file mode 100644 index 000000000000..12af9cb22162 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/multi_execution_aggregator.py @@ -0,0 +1,159 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal class for multi execution context aggregator implementation in the Azure Cosmos database service. 
+""" + +import heapq +from azure.cosmos._execution_context.aio.base_execution_context import _QueryExecutionContextBase +from azure.cosmos._execution_context import document_producer +from azure.cosmos._routing import routing_range + +# pylint: disable=protected-access + + +class _MultiExecutionContextAggregator(_QueryExecutionContextBase): + """This class is capable of queries which requires rewriting based on + backend's returned query execution info. + + This class maintains the execution context for each partition key range + and aggregates the corresponding results from each execution context. + + When handling an orderby query, _MultiExecutionContextAggregator + instantiates one instance of DocumentProducer per target partition key range + and aggregates the result of each. + """ + + # TODO improvement: this class needs to be parallelized + + class PriorityQueue: + """Provides a Priority Queue abstraction data structure""" + + def __init__(self): + self._heap = [] + + def pop(self): + return heapq.heappop(self._heap) + + def push(self, item): + heapq.heappush(self._heap, item) + + def peek(self): + return self._heap[0] + + def size(self): + return len(self._heap) + + def __init__(self, client, resource_link, query, options, partitioned_query_ex_info): + super(_MultiExecutionContextAggregator, self).__init__(client, options) + + # use the routing provider in the client + self._routing_provider = client._routing_map_provider + self._client = client + self._resource_link = resource_link + self._query = query + self._partitioned_query_ex_info = partitioned_query_ex_info + self._sort_orders = partitioned_query_ex_info.get_order_by() + + if self._sort_orders: + self._document_producer_comparator = document_producer._OrderByDocumentProducerComparator(self._sort_orders) + else: + self._document_producer_comparator = document_producer._PartitionKeyRangeDocumentProduerComparator() + + # will be a list of (partition_min, partition_max) tuples + targetPartitionRanges = self._get_target_partition_key_range() + + targetPartitionQueryExecutionContextList = [] + for partitionTargetRange in targetPartitionRanges: + # create and add the child execution context for the target range + targetPartitionQueryExecutionContextList.append( + self._createTargetPartitionQueryExecutionContext(partitionTargetRange) + ) + + self._orderByPQ = _MultiExecutionContextAggregator.PriorityQueue() + + for targetQueryExContext in targetPartitionQueryExecutionContextList: + + try: + # TODO: we can also use more_itertools.peekable to be more python friendly + targetQueryExContext.peek() + # if there are matching results in the target ex range add it to the priority queue + + self._orderByPQ.push(targetQueryExContext) + + except StopIteration: + continue + + async def __anext__(self): + """Returns the next result + + :return: The next result. + :rtype: dict + :raises StopIteration: If no more result is left. 
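The priority queue above is what lets the aggregator merge per-partition, already-ordered result streams for ORDER BY queries. A simplified, synchronous illustration of that merge (plain lists stand in for the DocumentProducer instances; this is not the SDK's code):

    import heapq

    class PriorityQueue:
        """Small heapq wrapper with the same shape as the aggregator's inner class."""

        def __init__(self):
            self._heap = []

        def push(self, item):
            heapq.heappush(self._heap, item)

        def pop(self):
            return heapq.heappop(self._heap)

        def size(self):
            return len(self._heap)

    # Hypothetical per-partition results, each already sorted by the ORDER BY key.
    partition_results = [[1, 4, 7], [2, 5], [3, 6, 8]]

    pq = PriorityQueue()
    for idx, results in enumerate(partition_results):
        if results:
            # (next value, partition index, cursor); tuples compare element-wise
            pq.push((results[0], idx, 0))

    merged = []
    while pq.size() > 0:
        value, idx, cursor = pq.pop()
        merged.append(value)
        cursor += 1
        if cursor < len(partition_results[idx]):
            pq.push((partition_results[idx][cursor], idx, cursor))

    print(merged)  # [1, 2, 3, 4, 5, 6, 7, 8]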
+ """ + if self._orderByPQ.size() > 0: + + targetRangeExContext = self._orderByPQ.pop() + res = next(targetRangeExContext) + + try: + # TODO: we can also use more_itertools.peekable to be more python friendly + targetRangeExContext.peek() + self._orderByPQ.push(targetRangeExContext) + + except StopIteration: + pass + + return res + raise StopAsyncIteration + + def fetch_next_block(self): + + raise NotImplementedError("You should use pipeline's fetch_next_block.") + + def _createTargetPartitionQueryExecutionContext(self, partition_key_target_range): + + rewritten_query = self._partitioned_query_ex_info.get_rewritten_query() + if rewritten_query: + if isinstance(self._query, dict): + # this is a parameterized query, collect all the parameters + query = dict(self._query) + query["query"] = rewritten_query + else: + query = rewritten_query + else: + query = self._query + + return document_producer._DocumentProducer( + partition_key_target_range, + self._client, + self._resource_link, + query, + self._document_producer_comparator, + self._options, + ) + + async def _get_target_partition_key_range(self): + + query_ranges = self._partitioned_query_ex_info.get_query_ranges() + return await self._routing_provider.get_overlapping_ranges( + self._resource_link, [routing_range.Range.ParseFromDict(range_as_dict) for range_as_dict in query_ranges] + ) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py index 54208bdad176..7875ce70fa67 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py @@ -73,7 +73,7 @@ async def __anext__(self): res = await self._execution_context.__anext__() self._top_count -= 1 return res - raise StopIteration + raise StopAsyncIteration class _QueryExecutionDistinctOrderedEndpointComponent(_QueryExecutionEndpointComponent): @@ -152,7 +152,7 @@ async def __anext__(self): if res is not None: self._offset_count -= 1 else: - raise StopIteration + raise StopAsyncIteration return await self._execution_context.__anext__() @@ -195,4 +195,4 @@ async def __anext__(self): res = self._results[self._result_index] self._result_index += 1 return res - raise StopIteration + raise StopAsyncIteration diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_retry_utility.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_retry_utility.py index 271e69584e2f..62747c83d294 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_retry_utility.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_retry_utility.py @@ -101,7 +101,7 @@ def Execute(client, global_endpoint_manager, function, *args, **kwargs): retry_policy = defaultRetry_policy # If none of the retry policies applies or there is no retry needed, set the - # throttle related response hedaers and re-throw the exception back arg[0] + # throttle related response headers and re-throw the exception back arg[0] # is the request. 
It needs to be modified for write forbidden exception if not retry_policy.ShouldRetry(e): if not client.last_response_headers: diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/aio/__init__.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/aio/__init__.py new file mode 100644 index 000000000000..f5373937e446 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/aio/__init__.py @@ -0,0 +1,20 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/aio/routing_map_provider.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/aio/routing_map_provider.py new file mode 100644 index 000000000000..474b26e4550b --- /dev/null +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/aio/routing_map_provider.py @@ -0,0 +1,193 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal class for partition key range cache implementation in the Azure +Cosmos database service. +""" + +from ... import _base +from ..collection_routing_map import CollectionRoutingMap +from .. import routing_range + +# pylint: disable=protected-access + + +class PartitionKeyRangeCache(object): + """ + PartitionKeyRangeCache provides list of effective partition key ranges for a + collection. + + This implementation loads and caches the collection routing map per + collection on demand. 
+ """ + + def __init__(self, client): + """ + Constructor + """ + + self._documentClient = client + + # keeps the cached collection routing map by collection id + self._collection_routing_map_by_item = {} + + async def get_overlapping_ranges(self, collection_link, partition_key_ranges): + """Given a partition key range and a collection, return the list of + overlapping partition key ranges. + + :param str collection_link: The name of the collection. + :param list partition_key_range: List of partition key range. + :return: List of overlapping partition key ranges. + :rtype: list + """ + cl = self._documentClient + + collection_id = _base.GetResourceIdOrFullNameFromLink(collection_link) + + collection_routing_map = self._collection_routing_map_by_item.get(collection_id) + if collection_routing_map is None: + collection_pk_ranges = [pk async for pk in cl._ReadPartitionKeyRanges(collection_link)] + # for large collections, a split may complete between the read partition key ranges query page responses, + # causing the partitionKeyRanges to have both the children ranges and their parents. Therefore, we need + # to discard the parent ranges to have a valid routing map. + collection_pk_ranges = PartitionKeyRangeCache._discard_parent_ranges(collection_pk_ranges) + collection_routing_map = CollectionRoutingMap.CompleteRoutingMap( + [(r, True) for r in collection_pk_ranges], collection_id + ) + self._collection_routing_map_by_item[collection_id] = collection_routing_map + return collection_routing_map.get_overlapping_ranges(partition_key_ranges) + + @staticmethod + def _discard_parent_ranges(partitionKeyRanges): + parentIds = set() + for r in partitionKeyRanges: + if isinstance(r, dict) and routing_range.PartitionKeyRange.Parents in r: + for parentId in r[routing_range.PartitionKeyRange.Parents]: + parentIds.add(parentId) + return (r for r in partitionKeyRanges if r[routing_range.PartitionKeyRange.Id] not in parentIds) + + +def _second_range_is_after_first_range(range1, range2): + if range1.max > range2.min: + ##r.min < #previous_r.max + return False + + if range2.min == range1.max and range1.isMaxInclusive and range2.isMinInclusive: + # the inclusive ending endpoint of previous_r is the same as the inclusive beginning endpoint of r + return False + + return True + + +def _is_sorted_and_non_overlapping(ranges): + for idx, r in list(enumerate(ranges))[1:]: + previous_r = ranges[idx - 1] + if not _second_range_is_after_first_range(previous_r, r): + return False + return True + + +def _subtract_range(r, partition_key_range): + """Evaluates and returns r - partition_key_range + + :param dict partition_key_range: Partition key range. + :param routing_range.Range r: query range. + :return: The subtract r - partition_key_range. + :rtype: routing_range.Range + """ + + left = max(partition_key_range[routing_range.PartitionKeyRange.MaxExclusive], r.min) + + if left == r.min: + leftInclusive = r.isMinInclusive + else: + leftInclusive = False + + queryRange = routing_range.Range(left, r.max, leftInclusive, r.isMaxInclusive) + return queryRange + + +class SmartRoutingMapProvider(PartitionKeyRangeCache): + """ + Efficiently uses PartitionKeyRangeCach and minimizes the unnecessary + invocation of CollectionRoutingMap.get_overlapping_ranges() + """ + + def get_overlapping_ranges(self, collection_link, partition_key_ranges): + """ + Given the sorted ranges and a collection, + Returns the list of overlapping partition key ranges + + :param str collection_link: The collection link. 
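The cache above drains an async paged result with an async list comprehension ([pk async for pk in ...]), the same pattern the offer queries later in this patch use. A stand-alone sketch of that pattern, with a plain async generator standing in for the paged call:

    import asyncio

    async def read_partition_key_ranges():
        # Stand-in for an async paged result such as _ReadPartitionKeyRanges(...)
        for pk_range in ({"id": "0"}, {"id": "1"}, {"id": "2"}):
            await asyncio.sleep(0)  # simulate an awaited page fetch
            yield pk_range

    async def main():
        # The async comprehension drains the async iterator into a regular list
        ranges = [r async for r in read_partition_key_ranges()]
        print([r["id"] for r in ranges])  # ['0', '1', '2']

    asyncio.run(main())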
+ :param (list of routing_range.Range) partition_key_ranges: + The sorted list of non-overlapping ranges. + :return: List of partition key ranges. + :rtype: list of dict + :raises ValueError: + If two ranges in partition_key_ranges overlap or if the list is not sorted + """ + + # validate if the list is non-overlapping and sorted + if not _is_sorted_and_non_overlapping(partition_key_ranges): + raise ValueError("the list of ranges is not a non-overlapping sorted ranges") + + target_partition_key_ranges = [] + + it = iter(partition_key_ranges) + try: + currentProvidedRange = next(it) + while True: + if currentProvidedRange.isEmpty(): + # skip and go to the next item\ + currentProvidedRange = next(it) + continue + + if target_partition_key_ranges: + queryRange = _subtract_range(currentProvidedRange, target_partition_key_ranges[-1]) + else: + queryRange = currentProvidedRange + + overlappingRanges = PartitionKeyRangeCache.get_overlapping_ranges(self, collection_link, queryRange) + assert overlappingRanges, "code bug: returned overlapping ranges for queryRange {} is empty".format( + queryRange + ) + target_partition_key_ranges.extend(overlappingRanges) + + lastKnownTargetRange = routing_range.Range.PartitionKeyRangeToRange(target_partition_key_ranges[-1]) + + # the overlapping ranges must contain the requested range + assert ( + currentProvidedRange.max <= lastKnownTargetRange.max + ), "code bug: returned overlapping ranges {} does not contain the requested range {}".format( + overlappingRanges, queryRange + ) + + # the current range is contained in target_partition_key_ranges just move forward + currentProvidedRange = next(it) + + while currentProvidedRange.max <= lastKnownTargetRange.max: + # the current range is covered too. just move forward + currentProvidedRange = next(it) + except StopIteration: + # when the iteration is exhausted we get here. There is nothing else to be done + pass + + return target_partition_key_ranges diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/__init__.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/__init__.py index e69de29bb2d1..ca9d2e221831 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/__init__.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/__init__.py @@ -0,0 +1,36 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
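For orientation, a hypothetical end-to-end use of the aio package assembled in this __init__.py; the endpoint, key and database id below are placeholders, and it assumes the async client can be used as an async context manager so the underlying transport gets closed:

    import asyncio
    from azure.cosmos.aio import CosmosClient

    ENDPOINT = "https://localhost:8081"   # placeholder (emulator endpoint)
    KEY = "<account-key>"                 # placeholder

    async def main():
        # Assumption: CosmosClient supports "async with" for clean session shutdown
        async with CosmosClient(ENDPOINT, KEY) as client:
            database = client.get_database_client("testdb")  # proxy creation stays synchronous
            properties = await database.read()               # data-plane calls are awaited
            print(properties["id"])

    asyncio.run(main())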
+ +from ._retry_utility_async import ConnectionRetryPolicy +from .container import ContainerProxy +from .cosmos_client import CosmosClient +from .database import DatabaseProxy +from .user import UserProxy +from .scripts import ScriptsProxy + +__all__ = ( + "CosmosClient", + "DatabaseProxy", + "ContainerProxy", + "ScriptsProxy", + "UserProxy", + "ConnectionRetryPolicy" +) \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py index bccc4ed68109..fe292174dbe7 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py @@ -31,7 +31,7 @@ from .. import exceptions from .. import http_constants -=from . import _retry_utility_async +from . import _retry_utility_async from .._synchronized_request import _request_body_from_data diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py index a23815559a8d..5a86a5770c92 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py @@ -53,7 +53,7 @@ from .. import _request_object from . import _asynchronous_request as asynchronous_request from . import _global_endpoint_manager_async as global_endpoint_manager_async -from .._routing import routing_map_provider +from .._routing.aio import routing_map_provider from ._retry_utility_async import ConnectionRetryPolicy from .. import _session from .. import _utils @@ -399,6 +399,28 @@ async def CreateItem(self, database_or_container_link, document, options=None, * return await self.Create(document, path, "docs", collection_id, None, options, **kwargs) + async def CreatePermission(self, user_link, permission, options=None, **kwargs): + """Creates a permission for a user. + + :param str user_link: + The link to the user entity. + :param dict permission: + The Azure Cosmos user permission to create. + :param dict options: + The request options for the request. + + :return: + The created Permission. + :rtype: + dict + + """ + if options is None: + options = {} + + path, user_id = self._GetUserIdWithPathForPermission(permission, user_link) + return await self.Create(permission, path, "permissions", user_id, None, options, **kwargs) + async def CreateUserDefinedFunction(self, collection_link, udf, options=None, **kwargs): """Creates a user-defined function in a collection. @@ -547,6 +569,28 @@ async def UpsertUser(self, database_link, user, options=None, **kwargs): database_id, path = self._GetDatabaseIdWithPathForUser(database_link, user) return await self.Upsert(user, path, "users", database_id, None, options, **kwargs) + async def UpsertPermission(self, user_link, permission, options=None, **kwargs): + """Upserts a permission for a user. + + :param str user_link: + The link to the user entity. + :param dict permission: + The Azure Cosmos user permission to upsert. + :param dict options: + The request options for the request. + + :return: + The upserted permission. 
+ :rtype: + dict + + """ + if options is None: + options = {} + + path, user_id = self._GetUserIdWithPathForPermission(permission, user_link) + return await self.Upsert(permission, path, "permissions", user_id, None, options, **kwargs) + async def UpsertItem(self, database_or_container_link, document, options=None, **kwargs): """Upserts a document in a collection. @@ -705,6 +749,48 @@ async def ReadItem(self, document_link, options=None, **kwargs): document_id = base.GetResourceIdOrFullNameFromLink(document_link) return await self.Read(path, "docs", document_id, None, options, **kwargs) + async def ReadUser(self, user_link, options=None, **kwargs): + """Reads a user. + + :param str user_link: + The link to the user entity. + :param dict options: + The request options for the request. + + :return: + The read User. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(user_link) + user_id = base.GetResourceIdOrFullNameFromLink(user_link) + return await self.Read(path, "users", user_id, None, options, **kwargs) + + async def ReadPermission(self, permission_link, options=None, **kwargs): + """Reads a permission. + + :param str permission_link: + The link to the permission. + :param dict options: + The request options for the request. + + :return: + The read permission. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(permission_link) + permission_id = base.GetResourceIdOrFullNameFromLink(permission_link) + return await self.Read(path, "permissions", permission_id, None, options, **kwargs) + async def ReadUserDefinedFunction(self, udf_link, options=None, **kwargs): """Reads a user-defined function. @@ -862,6 +948,29 @@ async def ReplaceUser(self, user_link, user, options=None, **kwargs): user_id = base.GetResourceIdOrFullNameFromLink(user_link) return await self.Replace(user, path, "users", user_id, None, options, **kwargs) + async def ReplacePermission(self, permission_link, permission, options=None, **kwargs): + """Replaces a permission and return it. + + :param str permission_link: + The link to the permission. + :param dict permission: + :param dict options: + The request options for the request. + + :return: + The new Permission. + :rtype: + dict + + """ + if options is None: + options = {} + + CosmosClientConnection.__ValidateResource(permission) + path = base.GetPathFromLink(permission_link) + permission_id = base.GetResourceIdOrFullNameFromLink(permission_link) + return await self.Replace(permission, path, "permissions", permission_id, None, options, **kwargs) + async def ReplaceContainer(self, collection_link, collection, options=None, **kwargs): """Replaces a collection and return it. @@ -1124,6 +1233,27 @@ async def DeleteUser(self, user_link, options=None, **kwargs): user_id = base.GetResourceIdOrFullNameFromLink(user_link) return await self.DeleteResource(path, "users", user_id, None, options, **kwargs) + async def DeletePermission(self, permission_link, options=None, **kwargs): + """Deletes a permission. + + :param str permission_link: + The link to the permission. + :param dict options: + The request options for the request. + + :return: + The deleted Permission. 
+ :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(permission_link) + permission_id = base.GetResourceIdOrFullNameFromLink(permission_link) + return await self.DeleteResource(path, "permissions", permission_id, None, options, **kwargs) + async def DeleteContainer(self, collection_link, options=None, **kwargs): """Deletes a collection. @@ -1306,6 +1436,154 @@ async def __Delete(self, path, request_params, req_headers, **kwargs): **kwargs ) + def _ReadPartitionKeyRanges(self, collection_link, feed_options=None, **kwargs): + """Reads Partition Key Ranges. + + :param str collection_link: + The link to the document collection. + :param dict feed_options: + + :return: + Query Iterable of PartitionKeyRanges. + :rtype: + query_iterable.QueryIterable + + """ + if feed_options is None: + feed_options = {} + + return self._QueryPartitionKeyRanges(collection_link, None, feed_options, **kwargs) + + def _QueryPartitionKeyRanges(self, collection_link, query, options=None, **kwargs): + """Queries Partition Key Ranges in a collection. + + :param str collection_link: + The link to the document collection. + :param (str or dict) query: + :param dict options: + The request options for the request. + + :return: + Query Iterable of PartitionKeyRanges. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(collection_link, "pkranges") + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + + async def fetch_fn(options): + return ( + await self.__QueryFeed( + path, "pkranges", collection_id, lambda r: r["PartitionKeyRanges"], + lambda _, b: b, query, options, **kwargs + ), + self.last_response_headers, + ) + + return AsyncItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) + + def ReadDatabases(self, options=None, **kwargs): + """Reads all databases. + + :param dict options: + The request options for the request. + + :return: + Query Iterable of Databases. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + return self.QueryDatabases(None, options, **kwargs) + + def QueryDatabases(self, query, options=None, **kwargs): + """Queries databases. + + :param (str or dict) query: + :param dict options: + The request options for the request. + + :return: Query Iterable of Databases. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + async def fetch_fn(options): + return ( + await self.__QueryFeed( + "/dbs", "dbs", "", lambda r: r["Databases"], + lambda _, b: b, query, options, **kwargs + ), + self.last_response_headers, + ) + + return AsyncItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) + + def ReadContainers(self, database_link, options=None, **kwargs): + """Reads all collections in a database. + + :param str database_link: + The link to the database. + :param dict options: + The request options for the request. + + :return: Query Iterable of Collections. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + return self.QueryContainers(database_link, None, options, **kwargs) + + def QueryContainers(self, database_link, query, options=None, **kwargs): + """Queries collections in a database. + + :param str database_link: + The link to the database. + :param (str or dict) query: + :param dict options: + The request options for the request. 
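The query methods above all share one shape: a coroutine fetch_fn retrieves a single page, and AsyncItemPaged turns the page stream into an async iterable of items. A simplified stand-in for that pattern (plain asyncio, not azure.core's actual paging classes):

    import asyncio

    async def fetch_page(continuation):
        # Stand-in for an awaited __QueryFeed call; returns (items, next_continuation)
        pages = {None: ([1, 2], "page2"), "page2": ([3, 4], None)}
        await asyncio.sleep(0)
        return pages[continuation]

    async def iterate_items(fetch_fn):
        # Flattens pages into one async stream, similar in spirit to AsyncItemPaged
        continuation = None
        while True:
            items, continuation = await fetch_fn(continuation)
            for item in items:
                yield item
            if continuation is None:
                break

    async def main():
        async for item in iterate_items(fetch_page):
            print(item)  # 1, 2, 3, 4

    asyncio.run(main())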
+ + :return: Query Iterable of Collections. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(database_link, "colls") + database_id = base.GetResourceIdOrFullNameFromLink(database_link) + + async def fetch_fn(options): + return ( + await self.__QueryFeed( + path, "colls", database_id, lambda r: r["DocumentCollections"], + lambda _, body: body, query, options, **kwargs + ), + self.last_response_headers, + ) + + return AsyncItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) + def ReadItems(self, collection_link, feed_options=None, response_hook=None, **kwargs): """Reads all documents in a collection. @@ -1514,6 +1792,268 @@ async def fetch_fn(options): page_iterator_class=query_iterable.QueryIterable ) + def ReadUsers(self, database_link, options=None, **kwargs): + """Reads all users in a database. + + :params str database_link: + The link to the database. + :params dict options: + The request options for the request. + :return: + Query iterable of Users. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + return self.QueryUsers(database_link, None, options, **kwargs) + + def QueryUsers(self, database_link, query, options=None, **kwargs): + """Queries users in a database. + + :param str database_link: + The link to the database. + :param (str or dict) query: + :param dict options: + The request options for the request. + + :return: + Query Iterable of Users. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(database_link, "users") + database_id = base.GetResourceIdOrFullNameFromLink(database_link) + + async def fetch_fn(options): + return ( + await self.__QueryFeed( + path, "users", database_id, lambda r: r["Users"], + lambda _, b: b, query, options, **kwargs + ), + self.last_response_headers, + ) + + return AsyncItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) + + def ReadPermissions(self, user_link, options=None, **kwargs): + """Reads all permissions for a user. + + :param str user_link: + The link to the user entity. + :param dict options: + The request options for the request. + + :return: + Query Iterable of Permissions. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + return self.QueryPermissions(user_link, None, options, **kwargs) + + def QueryPermissions(self, user_link, query, options=None, **kwargs): + """Queries permissions for a user. + + :param str user_link: + The link to the user entity. + :param (str or dict) query: + :param dict options: + The request options for the request. + + :return: + Query Iterable of Permissions. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(user_link, "permissions") + user_id = base.GetResourceIdOrFullNameFromLink(user_link) + + async def fetch_fn(options): + return ( + await self.__QueryFeed( + path, "permissions", user_id, lambda r: r["Permissions"], lambda _, b: b, query, options, **kwargs + ), + self.last_response_headers, + ) + + return AsyncItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) + + def ReadStoredProcedures(self, collection_link, options=None, **kwargs): + """Reads all store procedures in a collection. 
+ + :param str collection_link: + The link to the document collection. + :param dict options: + The request options for the request. + + :return: + Query Iterable of Stored Procedures. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + return self.QueryStoredProcedures(collection_link, None, options, **kwargs) + + def QueryStoredProcedures(self, collection_link, query, options=None, **kwargs): + """Queries stored procedures in a collection. + + :param str collection_link: + The link to the document collection. + :param (str or dict) query: + :param dict options: + The request options for the request. + + :return: + Query Iterable of Stored Procedures. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(collection_link, "sprocs") + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + + async def fetch_fn(options): + return ( + await self.__QueryFeed( + path, "sprocs", collection_id, lambda r: r["StoredProcedures"], + lambda _, b: b, query, options, **kwargs + ), + self.last_response_headers, + ) + + return AsyncItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) + + def ReadTriggers(self, collection_link, options=None, **kwargs): + """Reads all triggers in a collection. + + :param str collection_link: + The link to the document collection. + :param dict options: + The request options for the request. + + :return: + Query Iterable of Triggers. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + return self.QueryTriggers(collection_link, None, options, **kwargs) + + def QueryTriggers(self, collection_link, query, options=None, **kwargs): + """Queries triggers in a collection. + + :param str collection_link: + The link to the document collection. + :param (str or dict) query: + :param dict options: + The request options for the request. + + :return: + Query Iterable of Triggers. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(collection_link, "triggers") + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + + async def fetch_fn(options): + return ( + await self.__QueryFeed( + path, "triggers", collection_id, lambda r: r["Triggers"], lambda _, b: b, query, options, **kwargs + ), + self.last_response_headers, + ) + + return AsyncItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) + + def ReadUserDefinedFunctions(self, collection_link, options=None, **kwargs): + """Reads all user-defined functions in a collection. + + :param str collection_link: + The link to the document collection. + :param dict options: + The request options for the request. + + :return: + Query Iterable of UDFs. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + return self.QueryUserDefinedFunctions(collection_link, None, options, **kwargs) + + def QueryUserDefinedFunctions(self, collection_link, query, options=None, **kwargs): + """Queries user-defined functions in a collection. + + :param str collection_link: + The link to the collection. + :param (str or dict) query: + :param dict options: + The request options for the request. + + :return: + Query Iterable of UDFs. 
+ :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(collection_link, "udfs") + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + + async def fetch_fn(options): + return ( + await self.__QueryFeed( + path, "udfs", collection_id, lambda r: r["UserDefinedFunctions"], + lambda _, b: b, query, options, **kwargs + ), + self.last_response_headers, + ) + + return AsyncItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) + def ReadConflicts(self, collection_link, feed_options=None, **kwargs): """Reads conflicts. @@ -1749,6 +2289,12 @@ def _GetContainerIdWithPathForItem(self, database_or_container_link, document, o collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) return collection_id, document, path + def _GetUserIdWithPathForPermission(self, permission, user_link): # pylint: disable=no-self-use + CosmosClientConnection.__ValidateResource(permission) + path = base.GetPathFromLink(user_link, "permissions") + user_id = base.GetResourceIdOrFullNameFromLink(user_link) + return path, user_id + def RegisterPartitionResolver(self, database_link, partition_resolver): """Registers the partition resolver associated with the database link diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py index 4d5cd77e91cb..78070648cf16 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py @@ -52,7 +52,7 @@ def __init__(self, client): self.refresh_time_interval_in_ms, ) self.refresh_needed = False - self.refresh_lock = asyncio.Lock() #Lock vs. RLock + self.refresh_lock = asyncio.Lock() self.last_refresh_time = 0 def get_refresh_time_interval_in_ms_stub(self): # pylint: disable=no-self-use diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_query_iterable_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_query_iterable_async.py index 4a3554154963..c554bbf1bfa9 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_query_iterable_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_query_iterable_async.py @@ -22,7 +22,7 @@ """Iterable query results in the Azure Cosmos database service. 
""" from azure.core.async_paging import AsyncPageIterator -from azure.cosmos._execution_context.aio import execution_dispatcher_async +from azure.cosmos._execution_context.aio import execution_dispatcher # pylint: disable=protected-access @@ -72,7 +72,7 @@ def __init__( self._collection_link = collection_link self._database_link = database_link self._partition_key = partition_key - self._ex_context = execution_dispatcher_async._ProxyQueryExecutionContext( + self._ex_context = execution_dispatcher._ProxyQueryExecutionContext( self._client, self._collection_link, self._query, self._options, self._fetch_function ) super(QueryIterable, self).__init__(self._fetch_next, self._unpack, continuation_token=continuation_token) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility_async.py index 39e2bd1264e3..cb926977844d 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility_async.py @@ -104,7 +104,7 @@ async def ExecuteAsync(client, global_endpoint_manager, function, *args, **kwarg retry_policy = defaultRetry_policy # If none of the retry policies applies or there is no retry needed, set the - # throttle related response hedaers and re-throw the exception back arg[0] + # throttle related response headers and re-throw the exception back arg[0] # is the request. It needs to be modified for write forbidden exception if not retry_policy.ShouldRetry(e): if not client.last_response_headers: diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py index aa6610cfae50..3c16b38ca01d 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py @@ -582,27 +582,16 @@ async def read_offer(self, **kwargs): "query": "SELECT * FROM root r WHERE r.resource=@link", "parameters": [{"name": "@link", "value": link}], } - offers = self.client_connection.QueryOffers(query_spec, **kwargs) - if not offers: + offers = [offer async for offer in self.client_connection.QueryOffers(query_spec, **kwargs)] + if len(offers) == 0: raise CosmosResourceNotFoundError( status_code=StatusCodes.NOT_FOUND, - message="Could not find Offer for container " + self.container_link) - - throughput, curr_offer = None, None - async for offer in offers: - if not offer: - raise CosmosResourceNotFoundError( - status_code=StatusCodes.NOT_FOUND, - message="Could not find Offer for container " + self.container_link) - else: - throughput = offer["content"]["offerThroughput"] - curr_offer = offer - StopAsyncIteration + message="Could not find Offer for database " + self.database_link) if response_hook: response_hook(self.client_connection.last_response_headers, offers) - - return Offer(offer_throughput=throughput, properties=curr_offer) + + return Offer(offer_throughput=offers[0]["content"]["offerThroughput"], properties=offers[0]) @distributed_trace_async async def replace_throughput(self, throughput, **kwargs): @@ -625,26 +614,15 @@ async def replace_throughput(self, throughput, **kwargs): "query": "SELECT * FROM root r WHERE r.resource=@link", "parameters": [{"name": "@link", "value": link}], } - offers = self.client_connection.QueryOffers(query_spec, **kwargs) - if not offers: + offers = [offer async for offer in self.client_connection.QueryOffers(query_spec, **kwargs)] + if len(offers) == 0: raise CosmosResourceNotFoundError( 
status_code=StatusCodes.NOT_FOUND, - message="Could not find Offer for container " + self.container_link) - - curr_offer = None - async for offer in offers: - if not offer: - raise CosmosResourceNotFoundError( - status_code=StatusCodes.NOT_FOUND, - message="Could not find Offer for container " + self.container_link) - else: - curr_offer = offer - StopAsyncIteration - - new_offer = curr_offer.copy() - new_offer["content"]["offerThroughput"] = throughput - data = await self.client_connection.ReplaceOffer(offer_link=curr_offer["_self"], offer=curr_offer, **kwargs) + message="Could not find Offer for database " + self.database_link) + new_offer = offers[0].copy() + new_offer["content"]["offerThroughput"] = throughput + data = await self.client_connection.ReplaceOffer(offer_link=offers[0]["_self"], offer=offers[0], **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers, data) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py index 06bf9e78827c..e2c94513b4d8 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py @@ -22,13 +22,11 @@ """Create, read, and delete databases in the Azure Cosmos DB SQL API service. """ -#Missing methods: -#list_databases(), query_databases(), - from typing import Any, Dict, Optional, Union, cast, Iterable, List import six -from azure.core.tracing.decorator_async import distributed_trace_async # pylint: disable=unused-import +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.tracing.decorator import distributed_trace from ..cosmos_client import _parse_connection_str, _build_auth from ._cosmos_client_connection_async import CosmosClientConnection @@ -289,6 +287,78 @@ def get_database_client(self, database): return DatabaseProxy(self.client_connection, id_value) + @distributed_trace + def list_databases( + self, + max_item_count=None, # type: Optional[int] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any + ): + # type: (...) -> Iterable[Dict[str, Any]] + """List the databases in a Cosmos DB SQL database account. + + :param int max_item_count: Max number of items to be returned in the enumeration operation. + :param bool populate_query_metrics: Enable returning query metrics in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: An Iterable of database properties (dicts). + :rtype: Iterable[dict[str, str]] + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + if populate_query_metrics is not None: + feed_options["populateQueryMetrics"] = populate_query_metrics + + result = self.client_connection.ReadDatabases(options=feed_options, **kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers) + return result + + @distributed_trace + def query_databases( + self, + query=None, # type: Optional[str] + parameters=None, # type: Optional[List[str]] + enable_cross_partition_query=None, # type: Optional[bool] + max_item_count=None, # type: Optional[int] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any + ): + # type: (...) 
-> Iterable[Dict[str, Any]] + """Query the databases in a Cosmos DB SQL database account. + + :param str query: The Azure Cosmos DB SQL query to execute. + :param list[str] parameters: Optional array of parameters to the query. Ignored if no query is provided. + :param bool enable_cross_partition_query: Allow scan on the queries which couldn't be + served as indexing was opted out on the requested paths. + :param int max_item_count: Max number of items to be returned in the enumeration operation. + :param bool populate_query_metrics: Enable returning query metrics in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: An Iterable of database properties (dicts). + :rtype: Iterable[dict[str, str]] + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if enable_cross_partition_query is not None: + feed_options["enableCrossPartitionQuery"] = enable_cross_partition_query + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + if populate_query_metrics is not None: + feed_options["populateQueryMetrics"] = populate_query_metrics + + result = self.client_connection.QueryDatabases( + query=query if parameters is None else dict(query=query, parameters=parameters), + options=feed_options, + **kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers) + return result + @distributed_trace_async async def delete_database( self, diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py index ba2fc513d0ea..4e26f19ab4a5 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py @@ -26,7 +26,8 @@ import warnings import six -from azure.core.tracing.decorator_async import distributed_trace_async # type: ignore +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.tracing.decorator import distributed_trace from ._cosmos_client_connection_async import CosmosClientConnection from .._base import build_options @@ -34,7 +35,7 @@ from ..offer import Offer from ..http_constants import StatusCodes from ..exceptions import CosmosResourceNotFoundError -from ..user import UserProxy +from .user import UserProxy from ..documents import IndexingMode __all__ = ("DatabaseProxy",) @@ -42,9 +43,6 @@ # pylint: disable=protected-access # pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs -#Missing query methods: -#list_containers(), query_containers(), list_users(), query_users(), read_offer(), replace_throughput() - class DatabaseProxy(object): """An interface to interact with a specific database. 
@@ -111,6 +109,12 @@ def _get_user_link(self, user_or_id): pass return u"{}/users/{}".format(self.database_link, cast("Dict[str, str]", user_or_id)["id"]) + async def _get_properties(self): + # type: () -> Dict[str, Any] + if self._properties is None: + self._properties = await self.read() + return self._properties + @distributed_trace_async async def read(self, populate_query_metrics=None, **kwargs): # type: (Optional[bool], Any) -> Dict[str, Any] @@ -326,6 +330,82 @@ def get_container_client(self, container): return ContainerProxy(self.client_connection, self.database_link, id_value) + @distributed_trace + def list_containers(self, max_item_count=None, populate_query_metrics=None, **kwargs): + # type: (Optional[int], Optional[bool], Any) -> Iterable[Dict[str, Any]] + """List the containers in the database. + + :param max_item_count: Max number of items to be returned in the enumeration operation. + :param populate_query_metrics: Enable returning query metrics in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: An Iterable of container properties (dicts). + :rtype: Iterable[dict[str, Any]] + + .. admonition:: Example: + + .. literalinclude:: ../samples/examples.py + :start-after: [START list_containers] + :end-before: [END list_containers] + :language: python + :dedent: 0 + :caption: List all containers in the database: + :name: list_containers + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + if populate_query_metrics is not None: + feed_options["populateQueryMetrics"] = populate_query_metrics + + result = self.client_connection.ReadContainers( + database_link=self.database_link, options=feed_options, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace + def query_containers( + self, + query=None, # type: Optional[str] + parameters=None, # type: Optional[List[str]] + max_item_count=None, # type: Optional[int] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any + ): + # type: (...) -> Iterable[Dict[str, Any]] + """List the properties for containers in the current database. + + :param query: The Azure Cosmos DB SQL query to execute. + :param parameters: Optional array of parameters to the query. Ignored if no query is provided. + :param max_item_count: Max number of items to be returned in the enumeration operation. + :param populate_query_metrics: Enable returning query metrics in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: An Iterable of container properties (dicts). 
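A hypothetical consumption sketch for list_containers/query_containers (assuming `database` is a DatabaseProxy obtained from the aio client, as in the earlier sketch): the methods themselves are synchronous but return async iterables, so callers use "async for". The container id below is a placeholder.

    async def show_containers(database):
        # list_containers() returns a paged async iterable, not a list
        async for container in database.list_containers():
            print(container["id"])

        # query_containers() accepts an optional parameterized query, using the same
        # {"name": ..., "value": ...} parameter dicts the sync SDK uses
        async for container in database.query_containers(
                query="SELECT * FROM root r WHERE r.id=@id",
                parameters=[{"name": "@id", "value": "products"}]):
            print(container["id"])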
+ :rtype: Iterable[dict[str, Any]] + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + if populate_query_metrics is not None: + feed_options["populateQueryMetrics"] = populate_query_metrics + + result = self.client_connection.QueryContainers( + database_link=self.database_link, + query=query if parameters is None else dict(query=query, parameters=parameters), + options=feed_options, + **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + @distributed_trace_async async def replace_container( self, @@ -493,6 +573,55 @@ def get_user_client(self, user): return UserProxy(client_connection=self.client_connection, id=id_value, database_link=self.database_link) + @distributed_trace + def list_users(self, max_item_count=None, **kwargs): + # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + """List all the users in the container. + + :param max_item_count: Max number of users to be returned in the enumeration operation. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: An Iterable of user properties (dicts). + :rtype: Iterable[dict[str, Any]] + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + + result = self.client_connection.ReadUsers( + database_link=self.database_link, options=feed_options, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace + def query_users(self, query, parameters=None, max_item_count=None, **kwargs): + # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] + """Return all users matching the given `query`. + + :param query: The Azure Cosmos DB SQL query to execute. + :param parameters: Optional array of parameters to the query. Ignored if no query is provided. + :param max_item_count: Max number of users to be returned in the enumeration operation. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: An Iterable of user properties (dicts). + :rtype: Iterable[str, Any] + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + + result = self.client_connection.QueryUsers( + database_link=self.database_link, + query=query if parameters is None else dict(query=query, parameters=parameters), + options=feed_options, + **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + @distributed_trace_async async def upsert_user(self, body, **kwargs): # type: (Dict[str, Any], Any) -> UserProxy @@ -576,4 +705,65 @@ async def delete_user(self, user, **kwargs): user_link=self._get_user_link(user), options=request_options, **kwargs ) if response_hook: - response_hook(self.client_connection.last_response_headers, result) \ No newline at end of file + response_hook(self.client_connection.last_response_headers, result) + + @distributed_trace_async + async def read_offer(self, **kwargs): + # type: (Any) -> Offer + """Read the Offer object for this database. + + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: Offer for the database. 
+ :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: + If no offer exists for the database or if the offer could not be retrieved. + :rtype: ~azure.cosmos.Offer + """ + response_hook = kwargs.pop('response_hook', None) + properties = await self._get_properties() + link = properties["_self"] + query_spec = { + "query": "SELECT * FROM root r WHERE r.resource=@link", + "parameters": [{"name": "@link", "value": link}], + } + offers = [offer async for offer in self.client_connection.QueryOffers(query_spec, **kwargs)] + if len(offers) == 0: + raise CosmosResourceNotFoundError( + status_code=StatusCodes.NOT_FOUND, + message="Could not find Offer for database " + self.database_link) + + if response_hook: + response_hook(self.client_connection.last_response_headers, offers) + + return Offer(offer_throughput=offers[0]["content"]["offerThroughput"], properties=offers[0]) + + @distributed_trace_async + async def replace_throughput(self, throughput, **kwargs): + # type: (Optional[int], Any) -> Offer + """Replace the database-level throughput. + + :param throughput: The throughput to be set (an integer). + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: Offer for the database, updated with new throughput. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: + If no offer exists for the database or if the offer could not be updated. + :rtype: ~azure.cosmos.Offer + """ + response_hook = kwargs.pop('response_hook', None) + properties = await self._get_properties() + link = properties["_self"] + query_spec = { + "query": "SELECT * FROM root r WHERE r.resource=@link", + "parameters": [{"name": "@link", "value": link}], + } + offers = [offer async for offer in self.client_connection.QueryOffers(query_spec, **kwargs)] + if len(offers) == 0: + raise CosmosResourceNotFoundError( + status_code=StatusCodes.NOT_FOUND, + message="Could not find Offer for database " + self.database_link) + + new_offer = offers[0].copy() + new_offer["content"]["offerThroughput"] = throughput + data = await self.client_connection.ReplaceOffer(offer_link=offers[0]["_self"], offer=offers[0], **kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers, data) + return Offer(offer_throughput=data["content"]["offerThroughput"], properties=data) \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py index 3acc8780917c..4e52bce701d0 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py @@ -59,44 +59,42 @@ def _get_resource_link(self, script_or_id, typ): return u"{}/{}/{}".format(self.container_link, typ, script_or_id) return script_or_id["_self"] - #TODO add async query functionality - # async def list_stored_procedures(self, max_item_count=None, **kwargs): - # # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] - # """List all stored procedures in the container. - - # :param int max_item_count: Max number of items to be returned in the enumeration operation. - # :returns: An Iterable of stored procedures (dicts). 
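A hypothetical usage sketch for the database-level read_offer/replace_throughput pair added above (again assuming `database` is an aio DatabaseProxy); both calls are awaited and return Offer objects:

    async def bump_database_throughput(database, extra_rus=100):
        # read_offer() returns an Offer carrying the current provisioned throughput
        offer = await database.read_offer()
        print("current throughput:", offer.offer_throughput)
        # replace_throughput() provisions the new value and returns the updated Offer
        return await database.replace_throughput(offer.offer_throughput + extra_rus)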
- # :rtype: Iterable[dict[str, Any]] - # """ - # feed_options = build_options(kwargs) - # if max_item_count is not None: - # feed_options["maxItemCount"] = max_item_count - - # return await self.client_connection.ReadStoredProcedures( - # collection_link=self.container_link, options=feed_options, **kwargs - # ) - - #TODO add async query functionality - # async def query_stored_procedures(self, query, parameters=None, max_item_count=None, **kwargs): - # # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] - # """Return all stored procedures matching the given `query`. - - # :param query: The Azure Cosmos DB SQL query to execute. - # :param parameters: Optional array of parameters to the query. Ignored if no query is provided. - # :param max_item_count: Max number of items to be returned in the enumeration operation. - # :returns: An Iterable of stored procedures (dicts). - # :rtype: Iterable[dict[str, Any]] - # """ - # feed_options = build_options(kwargs) - # if max_item_count is not None: - # feed_options["maxItemCount"] = max_item_count - - # return await self.client_connection.QueryStoredProcedures( - # collection_link=self.container_link, - # query=query if parameters is None else dict(query=query, parameters=parameters), - # options=feed_options, - # **kwargs - # ) + def list_stored_procedures(self, max_item_count=None, **kwargs): + # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + """List all stored procedures in the container. + + :param int max_item_count: Max number of items to be returned in the enumeration operation. + :returns: An Iterable of stored procedures (dicts). + :rtype: Iterable[dict[str, Any]] + """ + feed_options = build_options(kwargs) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + + return self.client_connection.ReadStoredProcedures( + collection_link=self.container_link, options=feed_options, **kwargs + ) + + def query_stored_procedures(self, query, parameters=None, max_item_count=None, **kwargs): + # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] + """Return all stored procedures matching the given `query`. + + :param query: The Azure Cosmos DB SQL query to execute. + :param parameters: Optional array of parameters to the query. Ignored if no query is provided. + :param max_item_count: Max number of items to be returned in the enumeration operation. + :returns: An Iterable of stored procedures (dicts). + :rtype: Iterable[dict[str, Any]] + """ + feed_options = build_options(kwargs) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + + return self.client_connection.QueryStoredProcedures( + collection_link=self.container_link, + query=query if parameters is None else dict(query=query, parameters=parameters), + options=feed_options, + **kwargs + ) async def get_stored_procedure(self, sproc, **kwargs): # type: (Union[str, Dict[str, Any]], Any) -> Dict[str, Any] @@ -209,44 +207,42 @@ async def execute_stored_procedure( **kwargs ) - #TODO add query functionality - # async def list_triggers(self, max_item_count=None, **kwargs): - # # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] - # """List all triggers in the container. - - # :param max_item_count: Max number of items to be returned in the enumeration operation. - # :returns: An Iterable of triggers (dicts). 
- # :rtype: Iterable[dict[str, Any]] - # """ - # feed_options = build_options(kwargs) - # if max_item_count is not None: - # feed_options["maxItemCount"] = max_item_count - - # return await self.client_connection.ReadTriggers( - # collection_link=self.container_link, options=feed_options, **kwargs - # ) - - #TODO add query functionality - # async def query_triggers(self, query, parameters=None, max_item_count=None, **kwargs): - # # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] - # """Return all triggers matching the given `query`. - - # :param query: The Azure Cosmos DB SQL query to execute. - # :param parameters: Optional array of parameters to the query. Ignored if no query is provided. - # :param max_item_count: Max number of items to be returned in the enumeration operation. - # :returns: An Iterable of triggers (dicts). - # :rtype: Iterable[dict[str, Any]] - # """ - # feed_options = build_options(kwargs) - # if max_item_count is not None: - # feed_options["maxItemCount"] = max_item_count - - # return await self.client_connection.QueryTriggers( - # collection_link=self.container_link, - # query=query if parameters is None else dict(query=query, parameters=parameters), - # options=feed_options, - # **kwargs - # ) + def list_triggers(self, max_item_count=None, **kwargs): + # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + """List all triggers in the container. + + :param max_item_count: Max number of items to be returned in the enumeration operation. + :returns: An Iterable of triggers (dicts). + :rtype: Iterable[dict[str, Any]] + """ + feed_options = build_options(kwargs) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + + return self.client_connection.ReadTriggers( + collection_link=self.container_link, options=feed_options, **kwargs + ) + + def query_triggers(self, query, parameters=None, max_item_count=None, **kwargs): + # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] + """Return all triggers matching the given `query`. + + :param query: The Azure Cosmos DB SQL query to execute. + :param parameters: Optional array of parameters to the query. Ignored if no query is provided. + :param max_item_count: Max number of items to be returned in the enumeration operation. + :returns: An Iterable of triggers (dicts). + :rtype: Iterable[dict[str, Any]] + """ + feed_options = build_options(kwargs) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + + return self.client_connection.QueryTriggers( + collection_link=self.container_link, + query=query if parameters is None else dict(query=query, parameters=parameters), + options=feed_options, + **kwargs + ) async def get_trigger(self, trigger, **kwargs): # type: (Union[str, Dict[str, Any]], Any) -> Dict[str, Any] @@ -319,44 +315,42 @@ async def delete_trigger(self, trigger, **kwargs): trigger_link=self._get_resource_link(trigger, ScriptType.Trigger), options=request_options, **kwargs ) - #TODO add query functionality - # async def list_user_defined_functions(self, max_item_count=None, **kwargs): - # # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] - # """List all the user-defined functions in the container. - - # :param max_item_count: Max number of items to be returned in the enumeration operation. - # :returns: An Iterable of user-defined functions (dicts). 
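A sketch of exercising the trigger helpers above end to end; the trigger body format and the `pre_trigger_include` keyword on item creation are assumed to mirror the synchronous SDK:

import azure.cosmos.documents as documents

async def trigger_sample(container):
    # Register a pre-trigger that stamps a timestamp on the incoming item.
    trigger_definition = {
        "id": "addTimestamp",
        "body": "function() { var req = getContext().getRequest();"
                " var item = req.getBody(); item.ts = new Date().toISOString();"
                " req.setBody(item); }",
        "triggerType": documents.TriggerType.Pre,
        "triggerOperation": documents.TriggerOperation.Create,
    }
    await container.scripts.create_trigger(body=trigger_definition)

    # Opt in to the trigger when creating an item (keyword name assumed).
    await container.create_item(body={"id": "item_with_ts"}, pre_trigger_include="addTimestamp")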
- # :rtype: Iterable[dict[str, Any]] - # """ - # feed_options = build_options(kwargs) - # if max_item_count is not None: - # feed_options["maxItemCount"] = max_item_count - - # return await self.client_connection.ReadUserDefinedFunctions( - # collection_link=self.container_link, options=feed_options, **kwargs - # ) - - #TODO add query functionality - # async def query_user_defined_functions(self, query, parameters=None, max_item_count=None, **kwargs): - # # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] - # """Return user-defined functions matching a given `query`. - - # :param query: The Azure Cosmos DB SQL query to execute. - # :param parameters: Optional array of parameters to the query. Ignored if no query is provided. - # :param max_item_count: Max number of items to be returned in the enumeration operation. - # :returns: An Iterable of user-defined functions (dicts). - # :rtype: Iterable[dict[str, Any]] - # """ - # feed_options = build_options(kwargs) - # if max_item_count is not None: - # feed_options["maxItemCount"] = max_item_count - - # return await self.client_connection.QueryUserDefinedFunctions( - # collection_link=self.container_link, - # query=query if parameters is None else dict(query=query, parameters=parameters), - # options=feed_options, - # **kwargs - # ) + def list_user_defined_functions(self, max_item_count=None, **kwargs): + # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + """List all the user-defined functions in the container. + + :param max_item_count: Max number of items to be returned in the enumeration operation. + :returns: An Iterable of user-defined functions (dicts). + :rtype: Iterable[dict[str, Any]] + """ + feed_options = build_options(kwargs) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + + return self.client_connection.ReadUserDefinedFunctions( + collection_link=self.container_link, options=feed_options, **kwargs + ) + + def query_user_defined_functions(self, query, parameters=None, max_item_count=None, **kwargs): + # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] + """Return user-defined functions matching a given `query`. + + :param query: The Azure Cosmos DB SQL query to execute. + :param parameters: Optional array of parameters to the query. Ignored if no query is provided. + :param max_item_count: Max number of items to be returned in the enumeration operation. + :returns: An Iterable of user-defined functions (dicts). 
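A sketch for the user-defined function helpers above, assuming the UDF body format mirrors the synchronous SDK; UDFs are referenced from SQL with the `udf.` prefix:

async def udf_sample(container):
    # Register a simple UDF (a JavaScript function supplied as a string).
    await container.scripts.create_user_defined_function(
        body={"id": "tax", "body": "function(amount) { return amount * 1.05; }"}
    )

    # Invoke it from a query; cross-partition since no partition key is supplied.
    results = container.query_items(
        query="SELECT VALUE udf.tax(c.subtotal) FROM c",
        enable_cross_partition_query=True,
    )
    async for taxed_amount in results:
        print(taxed_amount)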
+ :rtype: Iterable[dict[str, Any]] + """ + feed_options = build_options(kwargs) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + + return self.client_connection.QueryUserDefinedFunctions( + collection_link=self.container_link, + query=query if parameters is None else dict(query=query, parameters=parameters), + options=feed_options, + **kwargs + ) async def get_user_defined_function(self, udf, **kwargs): # type: (Union[str, Dict[str, Any]], Any) -> Dict[str, Any] diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py new file mode 100644 index 000000000000..ec9463f2d0f2 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py @@ -0,0 +1,299 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +# pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs + +"""Create, read, update and delete users in the Azure Cosmos DB SQL API service. +""" + +from typing import Any, List, Dict, Union, cast, Iterable, Optional + +import six +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.tracing.decorator import distributed_trace + + +from ._cosmos_client_connection_async import CosmosClientConnection +from .._base import build_options +from ..permission import Permission + + +class UserProxy(object): + """An interface to interact with a specific user. + + This class should not be instantiated directly. Instead, use the + :func:`DatabaseProxy.get_user_client` method. 
+ """ + + def __init__(self, client_connection, id, database_link, properties=None): # pylint: disable=redefined-builtin + # type: (CosmosClientConnection, str, str, Dict[str, Any]) -> None + self.client_connection = client_connection + self.id = id + self.user_link = u"{}/users/{}".format(database_link, id) + self._properties = properties + + def __repr__(self): + # type () -> str + return "".format(self.user_link)[:1024] + + def _get_permission_link(self, permission_or_id): + # type: (Union[Permission, str, Dict[str, Any]]) -> str + if isinstance(permission_or_id, six.string_types): + return u"{}/permissions/{}".format(self.user_link, permission_or_id) + try: + return cast("Permission", permission_or_id).permission_link + except AttributeError: + pass + return u"{}/permissions/{}".format(self.user_link, cast("Dict[str, str]", permission_or_id)["id"]) + + async def _get_properties(self): + # type: () -> Dict[str, Any] + if self._properties is None: + self._properties = await self.read() + return self._properties + + @distributed_trace_async + async def read(self, **kwargs): + # type: (Any) -> Dict[str, Any] + """Read user propertes. + + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: A dictionary of the retrieved user properties. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given user couldn't be retrieved. + :rtype: dict[str, Any] + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + + self._properties = await self.client_connection.ReadUser(user_link=self.user_link, options=request_options, **kwargs) + + if response_hook: + response_hook(self.client_connection.last_response_headers, self._properties) + + return cast('Dict[str, Any]', self._properties) + + @distributed_trace + def list_permissions(self, max_item_count=None, **kwargs): + # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + """List all permission for the user. + + :param max_item_count: Max number of permissions to be returned in the enumeration operation. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: An Iterable of permissions (dicts). + :rtype: Iterable[dict[str, Any]] + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + + result = self.client_connection.ReadPermissions(user_link=self.user_link, options=feed_options, **kwargs) + + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + + return result + + @distributed_trace + def query_permissions( + self, + query, + parameters=None, + max_item_count=None, + **kwargs + ): + # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] + """Return all permissions matching the given `query`. + + :param query: The Azure Cosmos DB SQL query to execute. + :param parameters: Optional array of parameters to the query. Ignored if no query is provided. + :param max_item_count: Max number of permissions to be returned in the enumeration operation. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: An Iterable of permissions (dicts). 
+ :rtype: Iterable[dict[str, Any]] + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + + result = self.client_connection.QueryPermissions( + user_link=self.user_link, + query=query if parameters is None else dict(query=query, parameters=parameters), + options=feed_options, + **kwargs + ) + + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + + return result + + @distributed_trace_async + async def get_permission(self, permission, **kwargs): + # type: (str, Any) -> Permission + """Get the permission identified by `id`. + + :param permission: The ID (name), dict representing the properties or :class:`Permission` + instance of the permission to be retrieved. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: A dict representing the retrieved permission. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given permission couldn't be retrieved. + :rtype: dict[str, Any] + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + + permission_resp = await self.client_connection.ReadPermission( + permission_link=self._get_permission_link(permission), options=request_options, **kwargs + ) # type: Dict[str, str] + + if response_hook: + response_hook(self.client_connection.last_response_headers, permission_resp) + + return Permission( + id=permission_resp["id"], + user_link=self.user_link, + permission_mode=permission_resp["permissionMode"], + resource_link=permission_resp["resource"], + properties=permission_resp, + ) + + @distributed_trace_async + async def create_permission(self, body, **kwargs): + # type: (Dict[str, Any], Any) -> Permission + """Create a permission for the user. + + To update or replace an existing permision, use the :func:`UserProxy.upsert_permission` method. + + :param body: A dict-like object representing the permission to create. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: A dict representing the new permission. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given permission couldn't be created. + :rtype: dict[str, Any] + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + + permission = await self.client_connection.CreatePermission( + user_link=self.user_link, permission=body, options=request_options, **kwargs + ) + + if response_hook: + response_hook(self.client_connection.last_response_headers, permission) + + return Permission( + id=permission["id"], + user_link=self.user_link, + permission_mode=permission["permissionMode"], + resource_link=permission["resource"], + properties=permission, + ) + + @distributed_trace_async + async def upsert_permission(self, body, **kwargs): + # type: (Dict[str, Any], Any) -> Permission + """Insert or update the specified permission. + + If the permission already exists in the container, it is replaced. If + the permission does not exist, it is inserted. + + :param body: A dict-like object representing the permission to update or insert. + :param Callable response_hook: A callable invoked with the response metadata. + :returns: A dict representing the upserted permission. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given permission could not be upserted. 
+ :rtype: dict[str, Any] + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + + permission = await self.client_connection.UpsertPermission( + user_link=self.user_link, permission=body, options=request_options, **kwargs + ) + + if response_hook: + response_hook(self.client_connection.last_response_headers, permission) + + return Permission( + id=permission["id"], + user_link=self.user_link, + permission_mode=permission["permissionMode"], + resource_link=permission["resource"], + properties=permission, + ) + + @distributed_trace_async + async def replace_permission(self, permission, body, **kwargs): + # type: (str, Dict[str, Any], Any) -> Permission + """Replaces the specified permission if it exists for the user. + + If the permission does not already exist, an exception is raised. + + :param permission: The ID (name), dict representing the properties or :class:`Permission` + instance of the permission to be replaced. + :param body: A dict-like object representing the permission to replace. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: A dict representing the permission after replace went through. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the replace failed or the permission + with given id does not exist. + :rtype: dict[str, Any] + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + + permission_resp = await self.client_connection.ReplacePermission( + permission_link=self._get_permission_link(permission), permission=body, options=request_options, **kwargs + ) # type: Dict[str, str] + + if response_hook: + response_hook(self.client_connection.last_response_headers, permission_resp) + + return Permission( + id=permission_resp["id"], + user_link=self.user_link, + permission_mode=permission_resp["permissionMode"], + resource_link=permission_resp["resource"], + properties=permission_resp, + ) + + @distributed_trace_async + async def delete_permission(self, permission, **kwargs): + # type: (str, Any) -> None + """Delete the specified permission from the user. + + If the permission does not already exist, an exception is raised. + + :param permission: The ID (name), dict representing the properties or :class:`Permission` + instance of the permission to be replaced. + :keyword Callable response_hook: A callable invoked with the response metadata. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The permission wasn't deleted successfully. + :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The permission does not exist for the user. 
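A sketch combining the upsert and delete methods above, including the not-found behaviour the delete docstring describes; the permission body shape is the same one used by the resource-token sample later in this patch:

from azure.cosmos import exceptions

async def grant_and_revoke(user, container_link):
    # Upsert a read-only permission scoped to the whole container.
    await user.upsert_permission(
        body={"id": "temp_read", "permissionMode": "Read", "resource": container_link}
    )
    try:
        await user.delete_permission("temp_read")
    except exceptions.CosmosResourceNotFoundError:
        # Deleting a permission that no longer exists raises, as documented above.
        pass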
+ :rtype: None + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + + result = await self.client_connection.DeletePermission( + permission_link=self._get_permission_link(permission), options=request_options, **kwargs + ) + + if response_hook: + response_hook(self.client_connection.last_response_headers, result) diff --git a/sdk/cosmos/azure-cosmos/samples/access_cosmos_with_resource_token_async.py b/sdk/cosmos/azure-cosmos/samples/access_cosmos_with_resource_token_async.py new file mode 100644 index 000000000000..bd3d0802a369 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/samples/access_cosmos_with_resource_token_async.py @@ -0,0 +1,248 @@ +import azure.cosmos.aio.cosmos_client as cosmos_client +import azure.cosmos.exceptions as exceptions +from azure.cosmos.partition_key import PartitionKey +import azure.cosmos.documents as documents + +import asyncio +import config +import json + +# ---------------------------------------------------------------------------------------------------------- +# Prerequistes - +# +# 1. An Azure Cosmos account - +# https://docs.microsoft.com/azure/cosmos-db/create-sql-api-python#create-a-database-account +# +# 2. Microsoft Azure Cosmos +# pip install azure-cosmos>=4.0.0 +# ---------------------------------------------------------------------------------------------------------- +# Sample - how to get and use resource token that allows restricted access to data +# ---------------------------------------------------------------------------------------------------------- +# Note: +# +# This sample creates a Container to your database account. +# Each time a Container is created the account will be billed for 1 hour of usage based on +# the provisioned throughput (RU/s) of that account. 
+# ---------------------------------------------------------------------------------------------------------- +HOST = config.settings["host"] +MASTER_KEY = config.settings["master_key"] +DATABASE_ID = config.settings["database_id"] +CONTAINER_ID = config.settings["container_id"] +PARTITION_KEY = PartitionKey(path="/username") + +# User that you want to give access to +USERNAME, USERNAME_2 = "user", "user2" + +CONTAINER_ALL_PERMISSION = "CONTAINER_ALL_PERMISSION" +PARTITION_READ_PERMISSION = "PARTITION_READ_PERMISSION" +DOCUMENT_ALL_PERMISSION = "DOCUMENT_ALL_PERMISSION" + + +async def create_user_if_not_exists(db, username): + try: + user = await db.create_user(body={"id": username}) + except exceptions.CosmosResourceExistsError: + user = db.get_user_client(username) + + return user + + +async def create_permission_if_not_exists(user, permission_definition): + try: + permission = await user.create_permission(permission_definition) + except exceptions.CosmosResourceExistsError: + permission = await user.get_permission(permission_definition["id"]) + + return permission + + +async def token_client_upsert(container, username, item_id): + try: + await container.upsert_item( + { + "id": item_id, + "username": username, + "msg": "This is a message for " + username, + } + ) + except exceptions.CosmosHttpResponseError: + print("Error in upserting item with id '{0}'.".format(item_id)) + + +async def token_client_read_all(container): + try: + items = container.read_all_items() + async for i in items: + print(i) + except exceptions.CosmosResourceNotFoundError: + print("Cannot read items--container '{0}' not found.".format(container.id)) + except exceptions.CosmosHttpResponseError: + print("Error in reading items in container '{0}'.".format(container.id)) + + +async def token_client_read_item(container, username, item_id): + try: + item = await container.read_item(item=item_id, partition_key=username) + print(item) + except exceptions.CosmosResourceNotFoundError: + print("Cannot read--item with id '{0}' not found.".format(item_id)) + except exceptions.CosmosHttpResponseError: + print("Error in reading item with id '{0}'.".format(item_id)) + + +async def token_client_delete(container, username, item_id): + try: + await container.delete_item(item=item_id, partition_key=username) + except exceptions.CosmosResourceNotFoundError: + print("Cannot delete--item with id '{0}' not found.".format(item_id)) + except exceptions.CosmosHttpResponseError: + print("Error in deleting item with id '{0}'.".format(item_id)) + + +async def token_client_query(container, username): + try: + async for item in container.query_items( + query="SELECT * FROM my_container c WHERE c.username=@username", + parameters=[{"name": "@username", "value": username}], + partition_key=username, + ): + print(json.dumps(item, indent=True)) + except exceptions.CosmosHttpResponseError: + print("Error in querying item(s)") + + +async def run_sample(): + async with cosmos_client.CosmosClient(HOST, MASTER_KEY) as client: + + try: + try: + db = await client.create_database(DATABASE_ID) + except exceptions.CosmosResourceExistsError: + db = client.get_database_client(DATABASE_ID) + + try: + container = await db.create_container( + id=CONTAINER_ID, partition_key=PARTITION_KEY + ) + except exceptions.CosmosResourceExistsError: + container = db.get_container_client(CONTAINER_ID) + + user = await create_user_if_not_exists(db, USERNAME) + + # Permission to perform operations on all items inside a container + permission_definition = { + "id": 
CONTAINER_ALL_PERMISSION, + "permissionMode": documents.PermissionMode.All, + "resource": container.container_link, + } + + permission = await create_permission_if_not_exists(user, permission_definition) + token = {} + token[container.container_link] = permission.properties["_token"] + + # Use token to connect to database + # If you initialize the asynchronous client without using 'async with' in your context, + # make sure to close the client once you're done using it + token_client = cosmos_client.CosmosClient(HOST, token) + token_db = token_client.get_database_client(DATABASE_ID) + token_container = token_db.get_container_client(CONTAINER_ID) + + ITEM_1_ID, ITEM_2_ID, ITEM_3_ID = "1", "2", "3" + + # Update or insert item if not exists + await token_client_upsert(token_container, USERNAME, ITEM_1_ID) + await token_client_upsert(token_container, USERNAME, ITEM_2_ID) + await token_client_upsert(token_container, USERNAME_2, ITEM_3_ID) + + # Read all items in the container, across all partitions + await token_client_read_all(token_container) + + # Read specific item + await token_client_read_item(token_container, USERNAME, ITEM_2_ID) + + # Query for items in a certain partition + await token_client_query(token_container, USERNAME_2) + + # Delete an item + await token_client_delete(token_container, USERNAME, ITEM_2_ID) + + # Give user read-only permission, for a specific partition + user_2 = await create_user_if_not_exists(db, USERNAME_2) + permission_definition = { + "id": PARTITION_READ_PERMISSION, + "permissionMode": documents.PermissionMode.Read, + "resource": container.container_link, + "resourcePartitionKey": [USERNAME_2], + } + permission = await create_permission_if_not_exists(user_2, permission_definition) + read_token = {} + read_token[container.container_link] = permission.properties["_token"] + + # Closing current token client in order to re-initialize with read_token below: + await token_client.close() + + # Use token to connect to database + # If you initialize the asynchronous client without using 'async with' make sure to close it once you're done + token_client = cosmos_client.CosmosClient(HOST, read_token) + token_db = token_client.get_database_client(DATABASE_ID) + token_container = token_db.get_container_client(CONTAINER_ID) + + # Fails since this client has access to only items with partition key USERNAME_2 (ie. 
"user2") + await token_client_read_all(token_container) + + # Ok to read item(s) with partition key "user2" + await token_client_read_item(token_container, USERNAME_2, ITEM_3_ID) + + # Can't upsert or delete since it's read-only + await token_client_upsert(token_container, USERNAME_2, ITEM_3_ID) + + # Give user CRUD permissions, only for a specific item + item_3 = await token_container.read_item(item=ITEM_3_ID, partition_key=USERNAME_2) + permission_list = user_2.list_permissions() + async for p in permission_list: + await user_2.delete_permission(p.get('id')) + user_2_permissions = [permission async for permission in user_2.list_permissions()] + assert len(user_2_permissions) == 0 + + permission_definition = { + "id": DOCUMENT_ALL_PERMISSION, + "permissionMode": documents.PermissionMode.All, + "resource": item_3.get('_self') #this identifies the item with id "3" + } + + permission = await create_permission_if_not_exists(user_2, permission_definition) + + item_token = {} + item_token[container.container_link] = permission.properties["_token"] + + # Closing current token client in order to re-initialize with item_token below: + await token_client.close() + + # Use token to connect to database + token_client = cosmos_client.CosmosClient(HOST, item_token) + token_db = token_client.get_database_client(DATABASE_ID) + token_container = token_db.get_container_client(CONTAINER_ID) + + # Fails since this client only has access to a specific item + await token_client_read_all(token_container) + + # Fails too, for same reason + await token_client_read_item(token_container, USERNAME, ITEM_1_ID) + + # Ok to perform operations on that specific item + await token_client_read_item(token_container, USERNAME_2, ITEM_3_ID) + await token_client_delete(token_container, USERNAME_2, ITEM_3_ID) + + # Closing current token client + await token_client.close() + + except exceptions.CosmosHttpResponseError as e: + print("\nrun_sample has caught an error. {0}".format(e.message)) + + finally: + print("\nrun_sample done") + + +if __name__ == "__main__": + loop = asyncio.get_event_loop() + loop.run_until_complete(run_sample()) diff --git a/sdk/cosmos/azure-cosmos/samples/change_feed_management.py b/sdk/cosmos/azure-cosmos/samples/change_feed_management.py index e12d27db0994..a3149bec21b9 100644 --- a/sdk/cosmos/azure-cosmos/samples/change_feed_management.py +++ b/sdk/cosmos/azure-cosmos/samples/change_feed_management.py @@ -2,7 +2,6 @@ import azure.cosmos.cosmos_client as cosmos_client import azure.cosmos.exceptions as exceptions import azure.cosmos.partition_key as partition_key -import datetime import uuid import config diff --git a/sdk/cosmos/azure-cosmos/samples/change_feed_management_async.py b/sdk/cosmos/azure-cosmos/samples/change_feed_management_async.py new file mode 100644 index 000000000000..027e1a773d93 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/samples/change_feed_management_async.py @@ -0,0 +1,99 @@ +import azure.cosmos.aio.cosmos_client as cosmos_client +import azure.cosmos.exceptions as exceptions +import azure.cosmos.documents as documents +import azure.cosmos.partition_key as partition_key +import uuid + +import asyncio +import config + +# ---------------------------------------------------------------------------------------------------------- +# Prerequistes - +# +# 1. An Azure Cosmos account - +# https:#azure.microsoft.com/en-us/documentation/articles/documentdb-create-account/ +# +# 2. 
Microsoft Azure Cosmos PyPi package - +# https://pypi.python.org/pypi/azure-cosmos/ +# ---------------------------------------------------------------------------------------------------------- +# Sample - demonstrates how to consume the Change Feed and iterate on the results. +# ---------------------------------------------------------------------------------------------------------- + +HOST = config.settings['host'] +MASTER_KEY = config.settings['master_key'] +DATABASE_ID = config.settings['database_id'] +CONTAINER_ID = config.settings['container_id'] + + +async def create_items(container, size): + print('Creating Items') + + for i in range(1, size): + c = str(uuid.uuid4()) + item_definition = {'id': 'item' + c, + 'address': {'street': '1 Microsoft Way'+c, + 'city': 'Redmond'+c, + 'state': 'WA', + 'zip code': 98052 + } + } + + await container.create_item(body=item_definition) + + +async def read_change_feed(container): + print('\nReading Change Feed from the beginning\n') + + # For a particular Partition Key Range we can use partition_key_range_id] + # 'is_start_from_beginning = True' will read from the beginning of the history of the container + # If no is_start_from_beginning is specified, the read change feed loop will pickup the items that happen while the loop / process is active + response = container.query_items_change_feed(is_start_from_beginning=True) + + # Because the asynchronous client returns an asynchronous iterator object for methods using queries, + # we do not need to await the function. However, attempting to cast this object into a list directly + # will throw an error; instead, iterate over the result using an async for loop like shown here + async for doc in response: + print(doc) + + print('\nFinished reading all the change feed\n') + + +async def run_sample(): + async with cosmos_client.CosmosClient(HOST, MASTER_KEY) as client: + try: + # setup database for this sample + try: + db = await client.create_database(id=DATABASE_ID) + except exceptions.CosmosResourceExistsError: + raise RuntimeError("Database with id '{}' already exists".format(DATABASE_ID)) + + # setup container for this sample + try: + container = await db.create_container( + id=CONTAINER_ID, + partition_key=partition_key.PartitionKey(path='/address/state', kind=documents.PartitionKind.Hash) + ) + print('Container with id \'{0}\' created'.format(CONTAINER_ID)) + + except exceptions.CosmosResourceExistsError: + raise RuntimeError("Container with id '{}' already exists".format(CONTAINER_ID)) + + await create_items(container, 100) + await read_change_feed(container) + + # cleanup database after sample + try: + await client.delete_database(db) + except exceptions.CosmosResourceNotFoundError: + pass + + except exceptions.CosmosHttpResponseError as e: + print('\nrun_sample has caught an error. 
{0}'.format(e.message)) + + finally: + print("\nrun_sample done") + + +if __name__ == '__main__': + loop = asyncio.get_event_loop() + loop.run_until_complete(run_sample()) diff --git a/sdk/cosmos/azure-cosmos/samples/container_management.py b/sdk/cosmos/azure-cosmos/samples/container_management.py index ff29c8b3e454..6ec53a68f136 100644 --- a/sdk/cosmos/azure-cosmos/samples/container_management.py +++ b/sdk/cosmos/azure-cosmos/samples/container_management.py @@ -212,6 +212,7 @@ def read_Container(db, id): try: container = db.get_container_client(id) + container.read() print('Container with id \'{0}\' was found, it\'s link is {1}'.format(container.id, container.container_link)) except exceptions.CosmosResourceNotFoundError: diff --git a/sdk/cosmos/azure-cosmos/samples/container_management_async.py b/sdk/cosmos/azure-cosmos/samples/container_management_async.py new file mode 100644 index 000000000000..63d2ae695283 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/samples/container_management_async.py @@ -0,0 +1,317 @@ +import azure.cosmos.aio.cosmos_client as cosmos_client +import azure.cosmos.exceptions as exceptions +from azure.cosmos.partition_key import PartitionKey + +import asyncio +import config + +# ---------------------------------------------------------------------------------------------------------- +# Prerequistes - +# +# 1. An Azure Cosmos account - +# https://azure.microsoft.com/en-us/documentation/articles/documentdb-create-account/ +# +# 2. Microsoft Azure Cosmos PyPi package - +# https://pypi.python.org/pypi/azure-cosmos/ +# ---------------------------------------------------------------------------------------------------------- +# Sample - demonstrates the basic CRUD operations on a Container resource for Azure Cosmos +# +# 1. Query for Container +# +# 2. Create Container +# 2.1 - Basic Create +# 2.2 - Create container with custom IndexPolicy +# 2.3 - Create container with provisioned throughput set +# 2.4 - Create container with unique key +# 2.5 - Create Container with partition key V2 +# 2.6 - Create Container with partition key V1 +# 2.7 - Create Container with analytical store enabled +# +# 3. Manage Container Provisioned Throughput +# 3.1 - Get Container provisioned throughput (RU/s) +# 3.2 - Change provisioned throughput (RU/s) +# +# 4. Get a Container by its Id property +# +# 5. List all Container resources in a Database +# +# 6. Delete Container +# ---------------------------------------------------------------------------------------------------------- +# Note - +# +# Running this sample will create (and delete) multiple Containers on your account. +# Each time a Container is created the account will be billed for 1 hour of usage based on +# the provisioned throughput (RU/s) of that account. +# ---------------------------------------------------------------------------------------------------------- + +HOST = config.settings['host'] +MASTER_KEY = config.settings['master_key'] +DATABASE_ID = config.settings['database_id'] +CONTAINER_ID = config.settings['container_id'] + +async def find_container(db, id): + print('1. Query for Container') + + # Because the asynchronous client returns an asynchronous iterator object for methods that use + # return several containers using queries, we do not need to await the function. 
However, attempting + # to cast this object into a list directly will throw an error; instead, iterate over the containers + # to populate your list using an async for loop like shown here or in the list_containers() method + query_containers_response = db.query_containers( + { + "query": "SELECT * FROM r WHERE r.id=@id", + "parameters": [ + { "name":"@id", "value": id } + ] + } + ) + containers = [container async for container in query_containers_response] + + if len(containers) > 0: + print('Container with id \'{0}\' was found'.format(id)) + else: + print('No container with id \'{0}\' was found'. format(id)) + + # Alternatively, you can directly iterate over the asynchronous iterator without building a separate + # list if you don't need the ordering or indexing capabilities + async for container in query_containers_response: + print(container['id']) + + +async def create_container(db, id): + """ Execute basic container creation. + This will create containers with 400 RUs with different indexing, partitioning, and storage options """ + + partition_key = PartitionKey(path='/id', kind='Hash') + print("\n2.1 Create Container - Basic") + + try: + await db.create_container(id=id, partition_key=partition_key) + print('Container with id \'{0}\' created'.format(id)) + + except exceptions.CosmosResourceExistsError: + print('A container with id \'{0}\' already exists'.format(id)) + + # Alternatively, you can also use the create_container_if_not_exists method to avoid using a try catch + # This method attempts to read the container first, and based on the result either creates or returns + # the existing container. Due to the additional overhead from attempting a read, it is recommended + # to use the create_container() method if you know the container doesn't already exist. 
+ await db.create_container_if_not_exists(id=id, partition_key=partition_key) + + print("\n2.2 Create Container - With custom index policy") + + coll = { + "id": id+"_container_custom_index_policy", + "indexingPolicy": { + "automatic": False + } + } + + container = await db.create_container_if_not_exists( + id=coll['id'], + partition_key=partition_key, + indexing_policy=coll['indexingPolicy'] + ) + properties = await container.read() + print('Container with id \'{0}\' created'.format(container.id)) + print('IndexPolicy Mode - \'{0}\''.format(properties['indexingPolicy']['indexingMode'])) + print('IndexPolicy Automatic - \'{0}\''.format(properties['indexingPolicy']['automatic'])) + + print("\n2.3 Create Container - With custom provisioned throughput") + + try: + container = await db.create_container( + id=id+"_container_custom_throughput", + partition_key=partition_key, + offer_throughput=400 + ) + print('Container with id \'{0}\' created'.format(container.id)) + + except exceptions.CosmosResourceExistsError: + print('A container with id \'{0}\' already exists'.format(coll['id'])) + + print("\n2.4 Create Container - With Unique keys") + + try: + container = await db.create_container( + id= id+"_container_unique_keys", + partition_key=partition_key, + unique_key_policy={'uniqueKeys': [{'paths': ['/field1/field2', '/field3']}]} + ) + properties = await container.read() + unique_key_paths = properties['uniqueKeyPolicy']['uniqueKeys'][0]['paths'] + print('Container with id \'{0}\' created'.format(container.id)) + print('Unique Key Paths - \'{0}\', \'{1}\''.format(unique_key_paths[0], unique_key_paths[1])) + + except exceptions.CosmosResourceExistsError: + print('A container with id \'container_unique_keys\' already exists') + + print("\n2.5 Create Container - With Partition key V2 (Default)") + + try: + container = await db.create_container( + id=id+"_container_partition_key_v2", + partition_key=PartitionKey(path='/id', kind='Hash') + ) + properties = await container.read() + print('Container with id \'{0}\' created'.format(container.id)) + print('Partition Key - \'{0}\''.format(properties['partitionKey'])) + + except exceptions.CosmosResourceExistsError: + print('A container with id \'container_partition_key_v2\' already exists') + + print("\n2.6 Create Container - With Partition key V1") + + try: + container = await db.create_container( + id=id+"_container_partition_key_v1", + partition_key=PartitionKey(path='/id', kind='Hash', version=1) + ) + properties = await container.read() + print('Container with id \'{0}\' created'.format(container.id)) + print('Partition Key - \'{0}\''.format(properties['partitionKey'])) + + except exceptions.CosmosResourceExistsError: + print('A container with id \'container_partition_key_v1\' already exists') + except Exception: + print("Skipping this step, account does not have Synapse Link activated") + + print("\n2.7 Create Container - With analytical store enabled") + + if 'localhost:8081' in HOST: + print("Skipping step since emulator does not support this yet") + else: + try: + container = await db.create_container( + id=id+"_container_analytical_store", + partition_key=PartitionKey(path='/id', kind='Hash'),analytical_storage_ttl=-1 + + ) + properties = await container.read() + print('Container with id \'{0}\' created'.format(container.id)) + print('Partition Key - \'{0}\''.format(properties['partitionKey'])) + + except exceptions.CosmosResourceExistsError: + print('A container with id \'_container_analytical_store\' already exists') + except Exception: + 
print('Creating container with analytical storage can only happen in synapse link activated accounts, skipping step') + + + +async def manage_provisioned_throughput(db, id): + print("\n3.1 Get Container provisioned throughput (RU/s)") + + # A Container's Provisioned Throughput determines the performance throughput of a container. + # A Container is loosely coupled to Offer through the Offer's offerResourceId + # Offer.offerResourceId == Container._rid + # Offer.resource == Container._self + + try: + # read the container, so we can get its _self + container = db.get_container_client(container=id) + + # now use its _self to query for Offers + offer = await container.read_offer() + + print('Found Offer \'{0}\' for Container \'{1}\' and its throughput is \'{2}\''.format(offer.properties['id'], container.id, offer.properties['content']['offerThroughput'])) + + except exceptions.CosmosResourceExistsError: + print('A container with id \'{0}\' does not exist'.format(id)) + + print("\n3.2 Change Provisioned Throughput of Container") + + #The Provisioned Throughput of a container controls the throughput allocated to the Container + + #The following code shows how you can change Container's throughput + offer = await container.replace_throughput(offer.offer_throughput + 100) + print('Replaced Offer. Provisioned Throughput is now \'{0}\''.format(offer.properties['content']['offerThroughput'])) + + +async def read_container(db, id): + print("\n4. Get a Container by id") + + try: + container = db.get_container_client(id) + await container.read() + print('Container with id \'{0}\' was found, it\'s link is {1}'.format(container.id, container.container_link)) + + except exceptions.CosmosResourceNotFoundError: + print('A container with id \'{0}\' does not exist'.format(id)) + + +async def list_containers(db): + print("\n5. List all Container in a Database") + + print('Containers:') + + # Because the asynchronous client returns an asynchronous iterator object for methods that use + # return several containers using queries, we do not need to await the function. However, attempting + # to cast this object into a list directly will throw an error; instead, iterate over the containers + # to populate your list using an async for loop like shown here or in the find_container() method + container_list = db.list_containers() + containers = [container async for container in container_list] + + if len(containers) == 0: + return + + for container in containers: + print(container['id']) + + # Alternitavely, you can directly iterate over the asynchronous iterator without building a separate + # list if you don't need the ordering or indexing capabilities + async for container in container_list: + print(container['id']) + + +async def delete_container(db, id): + print("\n6. 
Delete Container") + + try: + await db.delete_container(id) + print('Container with id \'{0}\' was deleted'.format(id)) + + except exceptions.CosmosResourceNotFoundError: + print('A container with id \'{0}\' does not exist'.format(id)) + + +async def run_sample(): + + async with cosmos_client.CosmosClient(HOST, {'masterKey': MASTER_KEY}) as client: + try: + db = await client.create_database_if_not_exists(id=DATABASE_ID) + + # query for a container + await find_container(db, CONTAINER_ID) + + # create a container + await create_container(db, CONTAINER_ID) + + # get & change Provisioned Throughput of container + await manage_provisioned_throughput(db, CONTAINER_ID) + + # get a container using its id + await read_container(db, CONTAINER_ID) + + # list all container on an account + await list_containers(db) + + # delete container by id + await delete_container(db, CONTAINER_ID) + + # cleanup database after sample + try: + await client.delete_database(db) + + except exceptions.CosmosResourceNotFoundError: + pass + + except exceptions.CosmosHttpResponseError as e: + print('\nrun_sample has caught an error. {0}'.format(e.message)) + + finally: + print("\nrun_sample done") + + +if __name__ == '__main__': + loop = asyncio.get_event_loop() + loop.run_until_complete(run_sample()) + diff --git a/sdk/cosmos/azure-cosmos/samples/database_management.py b/sdk/cosmos/azure-cosmos/samples/database_management.py index fe5017c5684b..31ccce6b2ba5 100644 --- a/sdk/cosmos/azure-cosmos/samples/database_management.py +++ b/sdk/cosmos/azure-cosmos/samples/database_management.py @@ -61,6 +61,7 @@ def read_database(client, id): try: database = client.get_database_client(id) + database.read() print('Database with id \'{0}\' was found, it\'s link is {1}'.format(id, database.database_link)) except exceptions.CosmosResourceNotFoundError: diff --git a/sdk/cosmos/azure-cosmos/samples/database_management_async.py b/sdk/cosmos/azure-cosmos/samples/database_management_async.py new file mode 100644 index 000000000000..97da4530bc56 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/samples/database_management_async.py @@ -0,0 +1,150 @@ +import azure.cosmos.aio.cosmos_client as cosmos_client +import azure.cosmos.exceptions as exceptions + +import asyncio +import config + +# ---------------------------------------------------------------------------------------------------------- +# Prerequistes - +# +# 1. An Azure Cosmos account - +# https://docs.microsoft.com/azure/cosmos-db/create-sql-api-python#create-a-database-account +# +# 2. Microsoft Azure Cosmos PyPi package - +# https://pypi.python.org/pypi/azure-cosmos/ +# ---------------------------------------------------------------------------------------------------------- +# Sample - demonstrates the basic CRUD operations on a Database resource for Azure Cosmos +# +# 1. Query for Database (QueryDatabases) +# +# 2. Create Database (CreateDatabase) +# +# 3. Get a Database by its Id property (ReadDatabase) +# +# 4. List all Database resources on an account (ReadDatabases) +# +# 5. Delete a Database given its Id property (DeleteDatabase) +# ---------------------------------------------------------------------------------------------------------- + +HOST = config.settings['host'] +MASTER_KEY = config.settings['master_key'] +DATABASE_ID = config.settings['database_id'] + +async def find_database(client, id): + print('1. 
Query for Database') + + # Because the asynchronous client returns an asynchronous iterator object for methods that use + # return several databases using queries, we do not need to await the function. However, attempting + # to cast this object into a list directly will throw an error; instead, iterate over the databases + # to populate your list using an async for loop like shown here or in the list_databases() method + query_databases_response = client.query_databases({ + "query": "SELECT * FROM r WHERE r.id=@id", + "parameters": [ + { "name":"@id", "value": id } + ] + }) + + databases = [database async for database in query_databases_response] + + if len(databases) > 0: + print('Database with id \'{0}\' was found'.format(id)) + else: + print('No database with id \'{0}\' was found'. format(id)) + + # Alternitavely, you can directly iterate over the asynchronous iterator without building a separate + # list if you don't need the ordering or indexing capabilities + async for database in query_databases_response: + print(database['id']) + + +async def create_database(client, id): + print("\n2. Create Database") + + try: + await client.create_database(id=id) + print('Database with id \'{0}\' created'.format(id)) + + except exceptions.CosmosResourceExistsError: + print('A database with id \'{0}\' already exists'.format(id)) + + # Alternatively, you can also use the create_database_if_not_exists method to avoid using a try catch + # This method attempts to read the database first, and based on the result either creates or returns + # the existing database. Due to the additional overhead from attempting a read, it is recommended + # to use the create_database() method if you know the database doesn't already exist. + await client.create_database_if_not_exists(id=id) + + +async def read_database(client, id): + print("\n3. Get a Database by id") + + try: + database = client.get_database_client(id) + await database.read() + print('Database with id \'{0}\' was found, it\'s link is {1}'.format(id, database.database_link)) + + except exceptions.CosmosResourceNotFoundError: + print('A database with id \'{0}\' does not exist'.format(id)) + + +async def list_databases(client): + print("\n4. List all Databases on an account") + + print('Databases:') + + # Because the asynchronous client returns an asynchronous iterator object for methods that use + # return several databases using queries, we do not need to await the function. However, attempting + # to cast this object into a list directly will throw an error; instead, iterate over the databases + # to populate your list using an async for loop like shown here or in the find_database() method + list_databases_response = client.list_databases() + databases = [database async for database in list_databases_response] + + if len(databases) == 0: + return + + for database in databases: + print(database['id']) + + # Alternitavely, you can directly iterate over the asynchronous iterator without building a separate + # list if you don't need the ordering or indexing capabilities + async for database in list_databases_response: + print(database['id']) + + +async def delete_database(client, id): + print("\n5. 
Delete Database") + + try: + await client.delete_database(id) + print('Database with id \'{0}\' was deleted'.format(id)) + + except exceptions.CosmosResourceNotFoundError: + print('A database with id \'{0}\' does not exist'.format(id)) + + +async def run_sample(): + async with cosmos_client.CosmosClient(HOST, {'masterKey': MASTER_KEY}) as client: + try: + # query for a database + await find_database(client, DATABASE_ID) + + # create a database + await create_database(client, DATABASE_ID) + + # get a database using its id + await read_database(client, DATABASE_ID) + + # list all databases on an account + await list_databases(client) + + # delete database by id + await delete_database(client, DATABASE_ID) + + except exceptions.CosmosHttpResponseError as e: + print('\nrun_sample has caught an error. {0}'.format(e.message)) + + finally: + print("\nrun_sample done") + +if __name__ == '__main__': + loop = asyncio.get_event_loop() + loop.run_until_complete(run_sample()) diff --git a/sdk/cosmos/azure-cosmos/samples/document_management_async.py b/sdk/cosmos/azure-cosmos/samples/document_management_async.py new file mode 100644 index 000000000000..523af364e4c6 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/samples/document_management_async.py @@ -0,0 +1,208 @@ +import azure.cosmos.aio.cosmos_client as cosmos_client +import azure.cosmos.exceptions as exceptions +from azure.cosmos.partition_key import PartitionKey +import datetime + +import asyncio +import config + +# ---------------------------------------------------------------------------------------------------------- +# Prerequistes - +# +# 1. An Azure Cosmos account - +# https:#azure.microsoft.com/en-us/documentation/articles/documentdb-create-account/ +# +# 2. Microsoft Azure Cosmos PyPi package - +# https://pypi.python.org/pypi/azure-cosmos/ +# ---------------------------------------------------------------------------------------------------------- +# Sample - demonstrates the basic CRUD operations on a Item resource for Azure Cosmos +# ---------------------------------------------------------------------------------------------------------- + +HOST = config.settings['host'] +MASTER_KEY = config.settings['master_key'] +DATABASE_ID = config.settings['database_id'] +CONTAINER_ID = config.settings['container_id'] + + +async def create_items(container): + print('Creating Items') + print('\n1.1 Create Item\n') + + # Create a SalesOrder object. This object has nested properties and various types including numbers, DateTimes and strings. + # This can be saved as JSON as is without converting into rows/columns. + sales_order = get_sales_order("SalesOrder1") + await container.create_item(body=sales_order) + + # As your app evolves, let's say your object has a new schema. You can insert SalesOrderV2 objects without any + # changes to the database tier. + sales_order2 = get_sales_order_v2("SalesOrder2") + await container.create_item(body=sales_order2) + + +async def read_item(container, doc_id): + print('\n1.2 Reading Item by Id\n') + + # Note that Reads require a partition key to be spcified. 
+ response = await container.read_item(item=doc_id, partition_key=doc_id) + + print('Item read by Id {0}'.format(doc_id)) + print('Account Number: {0}'.format(response.get('account_number'))) + print('Subtotal: {0}'.format(response.get('subtotal'))) + + +async def read_items(container): + print('\n1.3 - Reading all items in a container\n') + + # NOTE: Use MaxItemCount on Options to control how many items come back per trip to the server + # Important to handle throttles whenever you are doing operations such as this that might + # result in a 429 (throttled request) + read_all_items_response = container.read_all_items(max_item_count=10) + + # Because the asynchronous client returns an asynchronous iterator object for methods that use + # return several items using queries, we do not need to await the function. However, attempting + # to cast this object into a list directly will throw an error; instead, iterate over the items + # using an async for loop like shown here and in the query_items() method below + item_list = [item async for item in read_all_items_response] + + print('Found {0} items'.format(item_list.__len__())) + + for doc in item_list: + print('Item Id: {0}'.format(doc.get('id'))) + + # Alternitavely, you can directly iterate over the asynchronous iterator without building a separate + # list if you don't need the ordering or indexing capabilities + async for item in read_all_items_response: + print(item.get('id')) + + +async def query_items(container, doc_id): + print('\n1.4 Querying for an Item by Id\n') + + # enable_cross_partition_query should be set to True as the container is partitioned + query_items_response = container.query_items( + query="SELECT * FROM r WHERE r.id=@id", + parameters=[ + { "name":"@id", "value": doc_id } + ], + enable_cross_partition_query=True + ) + + items = [item async for item in query_items_response] + + print('Item queried by Id {0}'.format(items[0].get("id"))) + + +async def replace_item(container, doc_id): + print('\n1.5 Replace an Item\n') + + read_item = await container.read_item(item=doc_id, partition_key=doc_id) + read_item['subtotal'] = read_item['subtotal'] + 1 + response = await container.replace_item(item=read_item, body=read_item) + + print('Replaced Item\'s Id is {0}, new subtotal={1}'.format(response['id'], response['subtotal'])) + + +async def upsert_item(container, doc_id): + print('\n1.6 Upserting an item\n') + + read_item = await container.read_item(item=doc_id, partition_key=doc_id) + read_item['subtotal'] = read_item['subtotal'] + 1 + response = await container.upsert_item(body=read_item) + + print('Upserted Item\'s Id is {0}, new subtotal={1}'.format(response['id'], response['subtotal'])) + + +async def delete_item(container, doc_id): + print('\n1.7 Deleting Item by Id\n') + + await container.delete_item(item=doc_id, partition_key=doc_id) + + print('Deleted item\'s Id is {0}'.format(doc_id)) + + +def get_sales_order(item_id): + order1 = {'id' : item_id, + 'account_number' : 'Account1', + 'purchase_order_number' : 'PO18009186470', + 'order_date' : datetime.date(2005,1,10).strftime('%c'), + 'subtotal' : 419.4589, + 'tax_amount' : 12.5838, + 'freight' : 472.3108, + 'total_due' : 985.018, + 'items' : [ + {'order_qty' : 1, + 'product_id' : 100, + 'unit_price' : 418.4589, + 'line_price' : 418.4589 + } + ], + 'ttl' : 60 * 60 * 24 * 30 + } + + return order1 + + +def get_sales_order_v2(item_id): + # notice new fields have been added to the sales order + order2 = {'id' : item_id, + 'account_number' : 'Account2', + 
'purchase_order_number' : 'PO15428132599', + 'order_date' : datetime.date(2005,7,11).strftime('%c'), + 'due_date' : datetime.date(2005,7,21).strftime('%c'), + 'shipped_date' : datetime.date(2005,7,15).strftime('%c'), + 'subtotal' : 6107.0820, + 'tax_amount' : 586.1203, + 'freight' : 183.1626, + 'discount_amt' : 1982.872, + 'total_due' : 4893.3929, + 'items' : [ + {'order_qty' : 3, + 'product_code' : 'A-123', # notice how in item details we no longer reference a ProductId + 'product_name' : 'Product 1', # instead we have decided to denormalise our schema and include + 'currency_symbol' : '$', # the Product details relevant to the Order on to the Order directly + 'currecny_code' : 'USD', # this is a typical refactor that happens in the course of an application + 'unit_price' : 17.1, # that would have previously required schema changes and data migrations etc. + 'line_price' : 5.7 + } + ], + 'ttl' : 60 * 60 * 24 * 30 + } + + return order2 + +async def run_sample(): + async with cosmos_client.CosmosClient(HOST, {'masterKey': MASTER_KEY}) as client: + try: + # setup database for this sample + db = await client.create_database_if_not_exists(id=DATABASE_ID) + + # setup container for this sample + container = await db.create_container_if_not_exists(id=CONTAINER_ID, partition_key=PartitionKey(path='/id', kind='Hash')) + print('Container with id \'{0}\' created'.format(CONTAINER_ID)) + + await create_items(container) + await read_item(container, 'SalesOrder1') + await read_items(container) + await query_items(container, 'SalesOrder1') + await replace_item(container, 'SalesOrder1') + await upsert_item(container, 'SalesOrder1') + await delete_item(container, 'SalesOrder1') + + # cleanup database after sample + try: + await client.delete_database(db) + + except exceptions.CosmosResourceNotFoundError: + pass + + except exceptions.CosmosHttpResponseError as e: + print('\nrun_sample has caught an error. 
{0}'.format(e.message)) + + finally: + print("\nrun_sample done") + + +if __name__ == '__main__': + loop = asyncio.get_event_loop() + loop.run_until_complete(run_sample()) + diff --git a/sdk/cosmos/azure-cosmos/samples/index_management.py b/sdk/cosmos/azure-cosmos/samples/index_management.py index 810c312beecb..5cbabf856bd9 100644 --- a/sdk/cosmos/azure-cosmos/samples/index_management.py +++ b/sdk/cosmos/azure-cosmos/samples/index_management.py @@ -9,9 +9,9 @@ import config -HOST = config.settings['host'] -MASTER_KEY = config.settings['master_key'] -DATABASE_ID = config.settings['database_id'] +HOST = 'https://simonmoreno-sql3.documents.azure.com:443/' +MASTER_KEY = 'Bl5eNK9aXEtvqrrPJPLs2gvCdllIZr2Dvm7dmsiVfgrznrlO2CHxSVODgy1ROxr33heMMTEVMp1eEuBFW6jHgw==' +DATABASE_ID = 'newdb' CONTAINER_ID = "index-samples" PARTITION_KEY = PartitionKey(path='/id', kind='Hash') diff --git a/sdk/cosmos/azure-cosmos/samples/index_management_async.py b/sdk/cosmos/azure-cosmos/samples/index_management_async.py new file mode 100644 index 000000000000..13bace9b98a3 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/samples/index_management_async.py @@ -0,0 +1,667 @@ +import sys +sys.path.append(r"C:\Users\simonmoreno\Repos\azure-sdk-for-python\sdk\cosmos\azure-cosmos") + +import azure.cosmos.documents as documents +import azure.cosmos.aio.cosmos_client as cosmos_client +import azure.cosmos.exceptions as exceptions +from azure.cosmos.partition_key import PartitionKey +import urllib3 +from requests.utils import DEFAULT_CA_BUNDLE_PATH as CaCertPath + +import asyncio +import config + +HOST = config.settings['host'] +MASTER_KEY = config.settings['master_key'] +DATABASE_ID = config.settings['database_id'] +CONTAINER_ID = "index-samples" +PARTITION_KEY = PartitionKey(path='/id', kind='Hash') + +# A typical container has the following properties within it's indexingPolicy property +# indexingMode +# automatic +# includedPaths +# excludedPaths +# +# We can toggle 'automatic' to eiher be True or False depending upon whether we want to have indexing over all columns by default or not. +# +# We can provide options while creating documents. indexingDirective is one such, +# by which we can tell whether it should be included or excluded in the index of the parent container. +# indexingDirective can be either 'Include', 'Exclude' or 'Default' + + +# To run this Demo, please provide your own CA certs file or download one from +# http://curl.haxx.se/docs/caextract.html +# Setup the certificate file in .pem format. 
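+#
+# Illustrative sketch (an assumption, not part of the original sample): if the async client passes
+# azure-core transport keywords through, the CA bundle (or False, e.g. for the local emulator) could
+# be supplied via "connection_verify" instead of silencing the warnings in obtain_client() below.
+def obtain_client_with_custom_ca(ca_bundle_path=CaCertPath):
+    return cosmos_client.CosmosClient(HOST, MASTER_KEY, connection_verify=ca_bundle_path)
+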
+# If you still get an SSLError, try disabling certificate verification and suppress warnings + +find_entity_by_id_query = { + "query": "SELECT * FROM r WHERE r.id=@id", + "parameters": [ + { "name":"@id", "value": id } + ] + } + +def obtain_client(): + # Try to setup the cacert.pem + # connection_policy.SSLConfiguration.SSLCaCerts = CaCertPath + # Else, disable verification + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) + return cosmos_client.CosmosClient(HOST, MASTER_KEY) + + +# Query for Entity / Entities +async def query_entities(parent, entity_type, id = None): + find_entity_by_id_query = { + "query": "SELECT * FROM r WHERE r.id=@id", + "parameters": [ + { "name":"@id", "value": id } + ] + } + entities = None + try: + if entity_type == 'database': + if id == None: + entities = [entity async for entity in parent.list_databases()] + else: + entities = [entity async for entity in parent.query_databases(find_entity_by_id_query)] + + elif entity_type == 'container': + if id == None: + entities = [entity async for entity in parent.list_containers()] + else: + entities = [entity async for entity in parent.query_containers(find_entity_by_id_query)] + + elif entity_type == 'document': + if id == None: + entities = [entity async for entity in parent.read_all_items()] + else: + entities = [entity async for entity in parent.query_items(find_entity_by_id_query)] + except exceptions.AzureError as e: + print("The following error occured while querying for the entity / entities ", entity_type, id if id != None else "") + print(e) + raise + if id == None: + return entities + if len(entities) == 1: + return entities[0] + return None + + +async def delete_container_if_exists(db, container_id): + try: + await db.delete_container(container_id) + print('Container with id \'{0}\' was deleted'.format(container_id)) + except exceptions.CosmosResourceNotFoundError: + pass + except exceptions.CosmosHttpResponseError as e: + if e.status_code == 400: + print("Bad request for container link", container_id) + raise + + +def print_dictionary_items(dict): + for k, v in dict.items(): + print("{:<15}".format(k), v) + print() + + +async def fetch_all_databases(client): + databases = await query_entities(client, 'database') + print("-" * 41) + print("-" * 41) + for db in databases: + print_dictionary_items(db) + print("-" * 41) + + +async def query_documents_with_custom_query(container, query_with_optional_parameters, message = "Document(s) found by query: "): + try: + results = container.query_items(query_with_optional_parameters, enable_cross_partition_query=True) + print(message) + async for doc in results: + print(doc) + return results + except exceptions.CosmosResourceNotFoundError: + print("Document doesn't exist") + except exceptions.CosmosHttpResponseError as e: + if e.status_code == 400: + # Can occur when we are trying to query on excluded paths + print("Bad Request exception occured: ", e) + pass + else: + raise + finally: + print() + + +async def explicitly_exclude_from_index(db): + """ The default index policy on a DocumentContainer will AUTOMATICALLY index ALL documents added. + There may be scenarios where you want to exclude a specific doc from the index even though all other + documents are being indexed automatically. + This method demonstrates how to use an index directive to control this + + """ + try: + await delete_container_if_exists(db, CONTAINER_ID) + + # Create a container with default index policy (i.e. 
automatic = true)
+        created_Container = await db.create_container(id=CONTAINER_ID, partition_key=PARTITION_KEY)
+        print(created_Container)
+
+        print("\n" + "-" * 25 + "\n1. Container created with index policy")
+        properties = await created_Container.read()
+        print_dictionary_items(properties["indexingPolicy"])
+
+        # Create a document and query on it immediately.
+        # Will work as automatic indexing is still True
+        doc = await created_Container.create_item(body={ "id" : "doc1", "orderId" : "order1" })
+        print("\n" + "-" * 25 + "Document doc1 created with order1" + "-" * 25)
+        print(doc)
+
+        query = {
+            "query": "SELECT * FROM r WHERE r.orderId=@orderNo",
+            "parameters": [ { "name":"@orderNo", "value": "order1" } ]
+        }
+        await query_documents_with_custom_query(created_Container, query)
+
+        # Now, create a document but this time explicitly exclude it from the container using IndexingDirective
+        # Then query for that document
+        # Should NOT find it, because we excluded it from the index
+        # BUT, the document is there and doing a ReadDocument by Id will prove it
+        doc2 = await created_Container.create_item(
+            body={ "id" : "doc2", "orderId" : "order2" },
+            indexing_directive=documents.IndexingDirective.Exclude
+        )
+        print("\n" + "-" * 25 + "Document doc2 created with order2" + "-" * 25)
+        print(doc2)
+
+        query = {
+            "query": "SELECT * FROM r WHERE r.orderId=@orderNo",
+            "parameters": [ { "name":"@orderNo", "value": "order2" } ]
+        }
+        await query_documents_with_custom_query(created_Container, query)
+
+        docRead = await created_Container.read_item(item="doc2", partition_key="doc2")
+        print("Document read by ID: \n", docRead["id"])
+
+        # Cleanup
+        await db.delete_container(created_Container)
+        print("\n")
+    except exceptions.CosmosResourceExistsError:
+        print("Entity already exists")
+    except exceptions.CosmosResourceNotFoundError:
+        print("Entity doesn't exist")
+
+
+async def use_manual_indexing(db):
+    """The default index policy on a DocumentContainer will AUTOMATICALLY index ALL documents added.
+    There may be cases where you want to turn off automatic indexing and only selectively add specific documents to the index.
+    This method demonstrates how to control this by setting the value of automatic within indexingPolicy to False
+
+    """
+    try:
+        await delete_container_if_exists(db, CONTAINER_ID)
+
+        # Create a container with manual (instead of automatic) indexing
+        created_Container = await db.create_container(
+            id=CONTAINER_ID,
+            indexing_policy={"automatic" : False},
+            partition_key=PARTITION_KEY
+        )
+        properties = await created_Container.read()
+        print(created_Container)
+
+        print("\n" + "-" * 25 + "\n2. 
Container created with index policy")
+        print_dictionary_items(properties["indexingPolicy"])
+
+        # Create a document
+        # Then query for that document
+        # We should find nothing, because automatic indexing on the container level is False
+        # BUT, the document is there and doing a ReadDocument by Id will prove it
+        doc = await created_Container.create_item(body={ "id" : "doc1", "orderId" : "order1" })
+        print("\n" + "-" * 25 + "Document doc1 created with order1" + "-" * 25)
+        print(doc)
+
+        query = {
+            "query": "SELECT * FROM r WHERE r.orderId=@orderNo",
+            "parameters": [ { "name":"@orderNo", "value": "order1" } ]
+        }
+        await query_documents_with_custom_query(created_Container, query)
+
+        docRead = await created_Container.read_item(item="doc1", partition_key="doc1")
+        print("Document read by ID: \n", docRead["id"])
+
+        # Now create a document, passing in an IndexingDirective saying we want to specifically index this document
+        # Query for the document again and this time we should find it because we manually included the document in the index
+        doc2 = await created_Container.create_item(
+            body={ "id" : "doc2", "orderId" : "order2" },
+            indexing_directive=documents.IndexingDirective.Include
+        )
+        print("\n" + "-" * 25 + "Document doc2 created with order2" + "-" * 25)
+        print(doc2)
+
+        query = {
+            "query": "SELECT * FROM r WHERE r.orderId=@orderNo",
+            "parameters": [ { "name":"@orderNo", "value": "order2" } ]
+        }
+        await query_documents_with_custom_query(created_Container, query)
+
+        # Cleanup
+        await db.delete_container(created_Container)
+        print("\n")
+    except exceptions.CosmosResourceExistsError:
+        print("Entity already exists")
+    except exceptions.CosmosResourceNotFoundError:
+        print("Entity doesn't exist")
+
+
+async def exclude_paths_from_index(db):
+    """The default behavior is for Cosmos to index every attribute in every document automatically.
+    There are times when a document contains large amounts of information, in deeply nested structures
+    that you know you will never search on. 
In extreme cases like this, you can exclude paths from the + index to save on storage cost, improve write performance and also improve read performance because the index is smaller + + This method demonstrates how to set excludedPaths within indexingPolicy + """ + try: + await delete_container_if_exists(db, CONTAINER_ID) + + doc_with_nested_structures = { + "id" : "doc1", + "foo" : "bar", + "metaData" : "meta", + "subDoc" : { "searchable" : "searchable", "nonSearchable" : "value" }, + "excludedNode" : { "subExcluded" : "something", "subExcludedNode" : { "someProperty" : "value" } } + } + container_to_create = { "id" : CONTAINER_ID , + "indexingPolicy" : + { + "includedPaths" : [ {'path' : "/*"} ], # Special mandatory path of "/*" required to denote include entire tree + "excludedPaths" : [ {'path' : "/metaData/*"}, # exclude metaData node, and anything under it + {'path' : "/subDoc/nonSearchable/*"}, # exclude ONLY a part of subDoc + {'path' : "/\"excludedNode\"/*"} # exclude excludedNode node, and anything under it + ] + } + } + print(container_to_create) + print(doc_with_nested_structures) + # Create a container with the defined properties + # The effect of the above IndexingPolicy is that only id, foo, and the subDoc/searchable are indexed + created_Container = await db.create_container( + id=container_to_create['id'], + indexing_policy=container_to_create['indexingPolicy'], + partition_key=PARTITION_KEY + ) + properties = await created_Container.read() + print(created_Container) + print("\n" + "-" * 25 + "\n4. Container created with index policy") + print_dictionary_items(properties["indexingPolicy"]) + + # The effect of the above IndexingPolicy is that only id, foo, and the subDoc/searchable are indexed + doc = await created_Container.create_item(body=doc_with_nested_structures) + print("\n" + "-" * 25 + "Document doc1 created with nested structures" + "-" * 25) + print(doc) + + # Querying for a document on either metaData or /subDoc/subSubDoc/someProperty > fail because these paths were excluded and they raise a BadRequest(400) Exception + query = {"query": "SELECT * FROM r WHERE r.metaData=@desiredValue", "parameters" : [{ "name":"@desiredValue", "value": "meta" }]} + await query_documents_with_custom_query(created_Container, query) + + query = {"query": "SELECT * FROM r WHERE r.subDoc.nonSearchable=@desiredValue", "parameters" : [{ "name":"@desiredValue", "value": "value" }]} + await query_documents_with_custom_query(created_Container, query) + + query = {"query": "SELECT * FROM r WHERE r.excludedNode.subExcludedNode.someProperty=@desiredValue", "parameters" : [{ "name":"@desiredValue", "value": "value" }]} + await query_documents_with_custom_query(created_Container, query) + + # Querying for a document using foo, or even subDoc/searchable > succeed because they were not excluded + query = {"query": "SELECT * FROM r WHERE r.foo=@desiredValue", "parameters" : [{ "name":"@desiredValue", "value": "bar" }]} + await query_documents_with_custom_query(created_Container, query) + + query = {"query": "SELECT * FROM r WHERE r.subDoc.searchable=@desiredValue", "parameters" : [{ "name":"@desiredValue", "value": "searchable" }]} + await query_documents_with_custom_query(created_Container, query) + + # Cleanup + await db.delete_container(created_Container) + print("\n") + except exceptions.CosmosResourceExistsError: + print("Entity already exists") + except exceptions.CosmosResourceNotFoundError: + print("Entity doesn't exist") + + +async def range_scan_on_hash_index(db): + """When a range index 
is not available (i.e. Only hash or no index found on the path), comparisons queries can still + be performed as scans using Allow scan request headers passed through options + + This method demonstrates how to force a scan when only hash indexes exist on the path + + ===== Warning===== + This was made an opt-in model by design. + Scanning is an expensive operation and doing this will have a large impact + on RequstUnits charged for an operation and will likely result in queries being throttled sooner. + """ + try: + await delete_container_if_exists(db, CONTAINER_ID) + + # Force a range scan operation on a hash indexed path + container_to_create = { "id" : CONTAINER_ID , + "indexingPolicy" : + { + "includedPaths" : [ {'path' : "/"} ], + "excludedPaths" : [ {'path' : "/length/*"} ] # exclude length + } + } + created_Container = await db.create_container( + id=container_to_create['id'], + indexing_policy=container_to_create['indexingPolicy'], + partition_key=PARTITION_KEY + ) + properties = await created_Container.read() + print(created_Container) + print("\n" + "-" * 25 + "\n5. Container created with index policy") + print_dictionary_items(properties["indexingPolicy"]) + + doc1 = await created_Container.create_item(body={ "id" : "dyn1", "length" : 10, "width" : 5, "height" : 15 }) + doc2 = await created_Container.create_item(body={ "id" : "dyn2", "length" : 7, "width" : 15 }) + doc3 = await created_Container.create_item(body={ "id" : "dyn3", "length" : 2 }) + print("Three docs created with ids : ", doc1["id"], doc2["id"], doc3["id"]) + + # Query for length > 5 - fail, this is a range based query on a Hash index only document + query = { "query": "SELECT * FROM r WHERE r.length > 5" } + await query_documents_with_custom_query(created_Container, query) + + # Now add IndexingDirective and repeat query + # expect 200 OK because now we are explicitly allowing scans in a query + # using the enableScanInQuery directive + results = created_Container.query_items( + query, + enable_scan_in_query=True, + enable_cross_partition_query=True + ) + print("Printing documents queried by range by providing enableScanInQuery = True") + async for doc in results: print(doc["id"]) + + # Cleanup + await db.delete_container(created_Container) + print("\n") + except exceptions.CosmosResourceExistsError: + print("Entity already exists") + except exceptions.CosmosResourceNotFoundError: + print("Entity doesn't exist") + + +async def use_range_indexes_on_strings(db): + """Showing how range queries can be performed even on strings. + + """ + try: + await delete_container_if_exists(db, CONTAINER_ID) + # containers = query_entities(client, 'container', parent_link = database_link) + # print(containers) + + # Use range indexes on strings + + # This is how you can specify a range index on strings (and numbers) for all properties. + # This is the recommended indexing policy for containers. i.e. precision -1 + #indexingPolicy = { + # 'indexingPolicy': { + # 'includedPaths': [ + # { + # 'indexes': [ + # { + # 'kind': documents.IndexKind.Range, + # 'dataType': documents.DataType.String, + # 'precision': -1 + # } + # ] + # } + # ] + # } + #} + + # For demo purposes, we are going to use the default (range on numbers, hash on strings) for the whole document (/* ) + # and just include a range index on strings for the "region". 
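+        # Note: 'precision': -1 requests maximum precision; on recent service versions range indexes
+        # already default to maximum precision, so the explicit setting below is mainly illustrative.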
+        container_definition = {
+            'id': CONTAINER_ID,
+            'indexingPolicy': {
+                'includedPaths': [
+                    {
+                        'path': '/region/?',
+                        'indexes': [
+                            {
+                                'kind': documents.IndexKind.Range,
+                                'dataType': documents.DataType.String,
+                                'precision': -1
+                            }
+                        ]
+                    },
+                    {
+                        'path': '/*'
+                    }
+                ]
+            }
+        }
+
+        created_Container = await db.create_container(
+            id=container_definition['id'],
+            indexing_policy=container_definition['indexingPolicy'],
+            partition_key=PARTITION_KEY
+        )
+        properties = await created_Container.read()
+        print(created_Container)
+        print("\n" + "-" * 25 + "\n6. Container created with index policy")
+        print_dictionary_items(properties["indexingPolicy"])
+
+        await created_Container.create_item(body={ "id" : "doc1", "region" : "USA" })
+        await created_Container.create_item(body={ "id" : "doc2", "region" : "UK" })
+        await created_Container.create_item(body={ "id" : "doc3", "region" : "Armenia" })
+        await created_Container.create_item(body={ "id" : "doc4", "region" : "Egypt" })
+
+        # Now ordering against region is allowed. You can run the following query
+        query = { "query" : "SELECT * FROM r ORDER BY r.region" }
+        message = "Documents ordered by region"
+        await query_documents_with_custom_query(created_Container, query, message)
+
+        # You can also perform filters against string comparison like >= 'UK'. Note that you can perform a prefix query,
+        # the equivalent of LIKE 'U%' (is >= 'U' AND < 'V')
+        query = { "query" : "SELECT * FROM r WHERE r.region >= 'U'" }
+        message = "Documents with region beginning with U"
+        await query_documents_with_custom_query(created_Container, query, message)
+
+        # Cleanup
+        await db.delete_container(created_Container)
+        print("\n")
+    except exceptions.CosmosResourceExistsError:
+        print("Entity already exists")
+    except exceptions.CosmosResourceNotFoundError:
+        print("Entity doesn't exist")
+
+
+async def perform_index_transformations(db):
+    try:
+        await delete_container_if_exists(db, CONTAINER_ID)
+
+        # Create a container with default indexing policy
+        created_Container = await db.create_container(id=CONTAINER_ID, partition_key=PARTITION_KEY)
+        properties = await created_Container.read()
+        print(created_Container)
+
+        print("\n" + "-" * 25 + "\n7. Container created with index policy")
+        print_dictionary_items(properties["indexingPolicy"])
+
+        # Insert some documents
+        doc1 = await created_Container.create_item(body={ "id" : "dyn1", "length" : 10, "width" : 5, "height" : 15 })
+        doc2 = await created_Container.create_item(body={ "id" : "dyn2", "length" : 7, "width" : 15 })
+        doc3 = await created_Container.create_item(body={ "id" : "dyn3", "length" : 2 })
+        print("Three docs created with ids : ", doc1["id"], doc2["id"], doc3["id"], " with indexing mode", properties['indexingPolicy']['indexingMode'])
+
+        # Switch to use string & number range indexing with maximum precision.
+        print("Changing to string & number range indexing with maximum precision (needed for Order By).")
+
+        properties['indexingPolicy']['includedPaths'][0]['indexes'] = [{
+            'kind': documents.IndexKind.Range,
+            'dataType': documents.DataType.String,
+            'precision': -1
+        }]
+
+        created_Container = await db.replace_container(
+            container=created_Container.id,
+            partition_key=PARTITION_KEY,
+            indexing_policy=properties['indexingPolicy']
+        )
+        properties = await created_Container.read()
+
+        # Check progress and wait for completion - should be instantaneous since we have only a few documents, but larger
+        # containers will take time. 
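+        # Illustrative sketch (an assumption, not part of the original sample): the service reports
+        # progress through the x-ms-documentdb-collection-index-transformation-progress response
+        # header on container reads, so polling could look roughly like this if the async client
+        # also exposes last_response_headers on its client_connection:
+        headers = getattr(getattr(db, "client_connection", None), "last_response_headers", None) or {}
+        progress = headers.get("x-ms-documentdb-collection-index-transformation-progress")
+        if progress is not None:
+            print("Index transformation progress: {}%".format(progress))
+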
+ print_dictionary_items(properties["indexingPolicy"]) + + # Now exclude a path from indexing to save on storage space. + print("Now excluding the path /length/ to save on storage space") + properties['indexingPolicy']['excludedPaths'] = [{"path" : "/length/*"}] + + created_Container = await db.replace_container( + container=created_Container.id, + partition_key=PARTITION_KEY, + indexing_policy=properties['indexingPolicy'] + ) + properties = await created_Container.read() + print_dictionary_items(properties["indexingPolicy"]) + + # Cleanup + await db.delete_container(created_Container) + print("\n") + except exceptions.CosmosResourceExistsError: + print("Entity already exists") + except exceptions.CosmosResourceNotFoundError: + print("Entity doesn't exist") + + +async def perform_multi_orderby_query(db): + try: + await delete_container_if_exists(db, CONTAINER_ID) + + # Create a container with composite indexes + indexing_policy = { + "compositeIndexes": [ + [ + { + "path": "/numberField", + "order": "ascending" + }, + { + "path": "/stringField", + "order": "descending" + } + ], + [ + { + "path": "/numberField", + "order": "descending" + }, + { + "path": "/stringField", + "order": "ascending" + }, + { + "path": "/numberField2", + "order": "descending" + }, + { + "path": "/stringField2", + "order": "ascending" + } + ] + ] + } + + created_container = await db.create_container( + id=CONTAINER_ID, + indexing_policy=indexing_policy, + partition_key=PARTITION_KEY + ) + properties = await created_container.read() + print(created_container) + + print("\n" + "-" * 25 + "\n8. Container created with index policy") + print_dictionary_items(properties["indexingPolicy"]) + + # Insert some documents + await created_container.create_item(body={"id": "doc1", "numberField": 1, "stringField": "1", "numberField2": 1, "stringField2": "1"}) + await created_container.create_item(body={"id": "doc2", "numberField": 1, "stringField": "1", "numberField2": 1, "stringField2": "2"}) + await created_container.create_item(body={"id": "doc3", "numberField": 1, "stringField": "1", "numberField2": 2, "stringField2": "1"}) + await created_container.create_item(body={"id": "doc4", "numberField": 1, "stringField": "1", "numberField2": 2, "stringField2": "2"}) + await created_container.create_item(body={"id": "doc5", "numberField": 1, "stringField": "2", "numberField2": 1, "stringField2": "1"}) + await created_container.create_item(body={"id": "doc6", "numberField": 1, "stringField": "2", "numberField2": 1, "stringField2": "2"}) + await created_container.create_item(body={"id": "doc7", "numberField": 1, "stringField": "2", "numberField2": 2, "stringField2": "1"}) + await created_container.create_item(body={"id": "doc8", "numberField": 1, "stringField": "2", "numberField2": 2, "stringField2": "2"}) + await created_container.create_item(body={"id": "doc9", "numberField": 2, "stringField": "1", "numberField2": 1, "stringField2": "1"}) + await created_container.create_item(body={"id": "doc10", "numberField": 2, "stringField": "1", "numberField2": 1, "stringField2": "2"}) + await created_container.create_item(body={"id": "doc11", "numberField": 2, "stringField": "1", "numberField2": 2, "stringField2": "1"}) + await created_container.create_item(body={"id": "doc12", "numberField": 2, "stringField": "1", "numberField2": 2, "stringField2": "2"}) + await created_container.create_item(body={"id": "doc13", "numberField": 2, "stringField": "2", "numberField2": 1, "stringField2": "1"}) + await created_container.create_item(body={"id": 
"doc14", "numberField": 2, "stringField": "2", "numberField2": 1, "stringField2": "2"}) + await created_container.create_item(body={"id": "doc15", "numberField": 2, "stringField": "2", "numberField2": 2, "stringField2": "1"}) + await created_container.create_item(body={"id": "doc16", "numberField": 2, "stringField": "2", "numberField2": 2, "stringField2": "2"}) + + print("Query documents and Order by 1st composite index: Ascending numberField and Descending stringField:") + + query = { + "query": "SELECT * FROM r ORDER BY r.numberField ASC, r.stringField DESC", + } + await query_documents_with_custom_query(created_container, query) + + print("Query documents and Order by inverted 2nd composite index -") + print("Ascending numberField, Descending stringField, Ascending numberField2, Descending stringField2") + + query = { + "query": "SELECT * FROM r ORDER BY r.numberField ASC, r.stringField DESC, r.numberField2 ASC, r.stringField2 DESC", + } + await query_documents_with_custom_query(created_container, query) + + # Cleanup + await db.delete_container(created_container) + print("\n") + except exceptions.CosmosResourceExistsError: + print("Entity already exists") + except exceptions.CosmosResourceNotFoundError: + print("Entity doesn't exist") + + +async def run_sample(): + try: + client = obtain_client() + await fetch_all_databases(client) + + # Create database if doesn't exist already. + created_db = await client.create_database_if_not_exists(DATABASE_ID) + print(created_db) + + # 1. Exclude a document from the index + await explicitly_exclude_from_index(created_db) + + # 2. Use manual (instead of automatic) indexing + await use_manual_indexing(created_db) + + # 4. Exclude specified document paths from the index + await exclude_paths_from_index(created_db) + + # 5. Force a range scan operation on a hash indexed path + await range_scan_on_hash_index(created_db) + + # 6. Use range indexes on strings + await use_range_indexes_on_strings(created_db) + + # 7. Perform an index transform + await perform_index_transformations(created_db) + + # 8. 
Perform Multi Orderby queries using composite indexes + await perform_multi_orderby_query(created_db) + + await client.close() + + except exceptions.AzureError as e: + raise e + +if __name__ == '__main__': + loop = asyncio.get_event_loop() + loop.run_until_complete(run_sample()) diff --git a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py index 94fc65b7d936..19a4040437b5 100644 --- a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py +++ b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py @@ -10,8 +10,11 @@ import azure.cosmos.exceptions as exceptions from azure.cosmos.partition_key import PartitionKey -endpoint = '' -key = '' +endpoint = 'https://simonmoreno-sql3.documents.azure.com:443/' +key = 'Bl5eNK9aXEtvqrrPJPLs2gvCdllIZr2Dvm7dmsiVfgrznrlO2CHxSVODgy1ROxr33heMMTEVMp1eEuBFW6jHgw==' + +emulator_link = 'https://localhost:8081' +emulator_key = 'C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==' import uuid @@ -28,6 +31,57 @@ def get_test_item(): } return async_item +def partition_split_test(): + client = SyncClient(endpoint, key) + db = client.create_database_if_not_exists("pker2") + container = db.create_container_if_not_exists(id="pkerc2", partition_key=PartitionKey(path="/id")) + # db = client.get_database_client("pktest") + # container = db.get_container_client("pktestcol") + for i in range(100): + body = get_test_item() + container.create_item(body=body) + query = "SELECT * FROM c" + success, errors = 0, 0 + current_pkid = client.client_connection.last_response_headers["x-ms-documentdb-partitionkeyrangeid"] + print("created items, waiting 10s, check current partition is {}".format(current_pkid)) + time.sleep(10) + for i in range(10000): + try: + x = container.query_items(query=query, enable_cross_partition_query=True) + print("Success, count: {}".format(len(list(x)))) + success += 1 + print("Successes: {}, Errors: {}".format(success, errors)) + if client.client_connection.last_response_headers["x-ms-documentdb-partitionkeyrangeid"] != current_pkid: + current_pkid = client.client_connection.last_response_headers["x-ms-documentdb-partitionkeyrangeid"] + print("PARTITION KEY RANGE ID WAS UPDATED TO {}".format(current_pkid)) + time.sleep(1) + time.sleep(1) + #Use breakpoint to stop execution, change provisioned RUs on container, wait for x-ms-offer-replace-pending header, then continue + #Increase to >10k RUs causes partition split (15k to be safe) + except Exception as e: + print(e.message) + print(e) + errors +=1 + print("Successes: {}, Errors: {}".format(success, errors)) + + #create 100 items, for i in 100000, query and sleeping, catch exception, after 10k/11k RU it will split partition + #query Select * from c + #sleep(1s) + #catch exception + +async def asynccccc(): + client = AsyncClient(endpoint, key) + db = await client.create_database_if_not_exists("ppppp") + async for x in client.list_databases(): + print("op") + print(x) + cont = await db.create_container_if_not_exists(id="pppppppp", partition_key=PartitionKey(path="/id")) + x = await cont.read() + print(x) + await client.delete_database("ppppp") + await client.close() + + async def async_crud_test(): db_name = "crudAsync" cont_name = "cont" @@ -183,7 +237,7 @@ async def create_tests(): ids2 = await timed_async_create(db2,cont2,num) print(len(ids1) == len(ids2)) -def user_test(): +def user_testsss(): client = SyncClient(endpoint, key) db = client.get_database_client("xusud") u = db.get_user_client(user="testid") @@ -199,30 +253,22 @@ 
async def qta(): itemId = "Async_e402afa6-badf-43f2-8ddd-83776221cb3a" print("attempting query") - y = await cont.read_offer() - print(type(y)) - print(y) - print(y.properties) - print(y.offer_throughput) - - print("replacing") - x = await cont.replace_throughput(throughput=400) - print(type(x)) - print(x.properties) - print(x.offer_throughput) - - z = cont.list_conflicts() - print(type(z)) - print(z) - - # query = "SELECT * FROM c WHERE c.id=@id" - # items = cont.query_items( - # query=query, - # parameters=[{"name":"@id", "value": itemId}], - # enable_cross_partition_query=True) - - # async for item in items: - # print(item) + query = "SELECT * FROM c" + items = cont.query_items( + query=query, + parameters=[{"name":"@id", "value": itemId}], + enable_cross_partition_query=True) + + print(items) + print(items is None) + async for item in items: + if not item: + print("NO ITEMS") + else: + print(item) + #or + list_of_items = [c async for c in items] + # x = cont.read_all_items() @@ -231,6 +277,22 @@ async def qta(): # async for item in x: # print(item) + # y = await cont.read_offer() + # print(type(y)) + # print(y) + # print(y.properties) + # print(y.offer_throughput) + + # print("replacing") + # x = await cont.replace_throughput(throughput=400) + # print(type(x)) + # print(x.properties) + # print(x.offer_throughput) + + # z = cont.list_conflicts() + # print(type(z)) + # print(z) + def qt(): client = SyncClient(endpoint, key) db = client.create_database_if_not_exists(id="qt") @@ -238,6 +300,9 @@ def qt(): id="qtc", partition_key=PartitionKey(path="/id")) + x = db.get_container_client("nice") + print(x) + # async def read_all(): # async with AsyncClient(endpoint, key) as client: # db = await client.create_database_if_not_exists("readall") @@ -251,8 +316,7 @@ def qt(): async def main(): # await read_tests() # await async_crud_test() - await qta() - qt() + await asynccccc() if __name__ == "__main__": From 5b805b8b8dda8b5cad2efbc8332d751ed29e8149 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Thu, 21 Oct 2021 17:31:27 -0400 Subject: [PATCH 20/56] oops --- sdk/cosmos/azure-cosmos/samples/index_management.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/samples/index_management.py b/sdk/cosmos/azure-cosmos/samples/index_management.py index 5cbabf856bd9..82105cdab54b 100644 --- a/sdk/cosmos/azure-cosmos/samples/index_management.py +++ b/sdk/cosmos/azure-cosmos/samples/index_management.py @@ -9,10 +9,10 @@ import config -HOST = 'https://simonmoreno-sql3.documents.azure.com:443/' -MASTER_KEY = 'Bl5eNK9aXEtvqrrPJPLs2gvCdllIZr2Dvm7dmsiVfgrznrlO2CHxSVODgy1ROxr33heMMTEVMp1eEuBFW6jHgw==' -DATABASE_ID = 'newdb' -CONTAINER_ID = "index-samples" +HOST = config.settings['host'] +MASTER_KEY = config.settings['master_key'] +DATABASE_ID = config.settings['database_id'] +CONTAINER_ID = config.settings['container_id'] PARTITION_KEY = PartitionKey(path='/id', kind='Hash') # A typical container has the following properties within it's indexingPolicy property From 8d8d0c4f7b7f36bd82ca39188384035f096050e7 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Fri, 22 Oct 2021 10:11:05 -0400 Subject: [PATCH 21/56] fully working queries --- .../aio/document_producer.py | 280 ++++++++++++++++++ .../aio/endpoint_component.py | 199 +++++++++++++ .../aio/execution_dispatcher.py | 11 +- .../aio/multi_execution_aggregator.py | 57 ++-- .../_execution_context/endpoint_component.py | 58 ++-- .../_routing/aio/routing_map_provider.py | 4 +- .../aio/_cosmos_client_connection_async.py | 
31 ++ .../samples/index_management_async.py | 8 +- .../azure-cosmos/samples/simon_testfile.py | 7 +- 9 files changed, 588 insertions(+), 67 deletions(-) create mode 100644 sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/document_producer.py create mode 100644 sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/endpoint_component.py diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/document_producer.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/document_producer.py new file mode 100644 index 000000000000..4b3c060ee19d --- /dev/null +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/document_producer.py @@ -0,0 +1,280 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal class for document producer implementation in the Azure Cosmos +database service. +""" + +import numbers +from collections import deque + +import six + +from azure.cosmos import _base +from azure.cosmos._execution_context.aio.base_execution_context import _DefaultQueryExecutionContext + + +class _DocumentProducer(object): + """This class takes care of handling of the results for one single partition + key range. + + When handling an orderby query, MultiExecutionContextAggregator instantiates + one instance of this class per target partition key range and aggregates the + result of each. + """ + + def __init__(self, partition_key_target_range, client, collection_link, query, document_producer_comp, options): + """ + Constructor + """ + self._options = options + self._partition_key_target_range = partition_key_target_range + self._doc_producer_comp = document_producer_comp + self._client = client + self._buffer = deque() + + self._is_finished = False + self._has_started = False + self._cur_item = None + # initiate execution context + + path = _base.GetPathFromLink(collection_link, "docs") + collection_id = _base.GetResourceIdOrFullNameFromLink(collection_link) + + async def fetch_fn(options): + return await self._client.QueryFeed(path, collection_id, query, options, partition_key_target_range["id"]) + + self._ex_context = _DefaultQueryExecutionContext(client, self._options, fetch_fn) + + def __lt__(self, other): + return self._doc_producer_comp.compare(self, other) < 0 + + async def __aiter__(self): + return self + + async def __anext__(self): + """ + :return: The next result item. + :rtype: dict + :raises StopIteration: If there is no more result. 
+ + """ + if self._cur_item is not None: + res = self._cur_item + self._cur_item = None + return res + + return await self._ex_context.__anext__() + + def get_target_range(self): + """Returns the target partition key range. + :return: + Target partition key range. + :rtype: dict + """ + return self._partition_key_target_range + + async def peek(self): + """ + TODO: use more_itertools.peekable instead + :return: The current result item. + :rtype: dict. + :raises StopIteration: If there is no current item. + + """ + if self._cur_item is None: + self._cur_item = await self._ex_context.__anext__() + + return self._cur_item + + + +def _compare_helper(a, b): + if a is None and b is None: + return 0 + return (a > b) - (a < b) + + +class _PartitionKeyRangeDocumentProduerComparator(object): + """ + Provides a Comparator for document producers using the min value of the + corresponding target partition. + """ + + def __init__(self): + pass + + def compare(self, doc_producer1, doc_producer2): # pylint: disable=no-self-use + return _compare_helper( + doc_producer1.get_target_range()["minInclusive"], doc_producer2.get_target_range()["minInclusive"] + ) + + +class _OrderByHelper(object): + + @staticmethod + def getTypeOrd(orderby_item): + """Returns the ordinal of the value of the item pair in the dictionary. + + :param dict orderby_item: + + :return: + 0 if the item_pair doesn't have any 'item' key + 1 if the value is undefined + 2 if the value is a boolean + 4 if the value is a number + 5 if the value is a str or a unicode + :rtype: int + """ + if "item" not in orderby_item: + return 0 + val = orderby_item["item"] + if val is None: + return 1 + if isinstance(val, bool): + return 2 + if isinstance(val, numbers.Number): + return 4 + if isinstance(val, six.string_types): + return 5 + + raise TypeError("unknown type" + str(val)) + + @staticmethod + def getTypeStr(orderby_item): + """Returns the string representation of the type + + :param dict orderby_item: + :return: String representation of the type + :rtype: str + """ + if "item" not in orderby_item: + return "NoValue" + val = orderby_item["item"] + if val is None: + return "Null" + if isinstance(val, bool): + return "Boolean" + if isinstance(val, numbers.Number): + return "Number" + if isinstance(val, six.string_types): + return "String" + + raise TypeError("unknown type" + str(val)) + + @staticmethod + def compare(orderby_item1, orderby_item2): + """Compare two orderby item pairs. + + :param dict orderby_item1: + :param dict orderby_item2: + :return: + Integer comparison result. + The comparator acts such that + - if the types are different we get: + Undefined value < Null < booleans < Numbers < Strings + - if both arguments are of the same type: + it simply compares the values. + :rtype: int + """ + + type1_ord = _OrderByHelper.getTypeOrd(orderby_item1) + type2_ord = _OrderByHelper.getTypeOrd(orderby_item2) + + type_ord_diff = type1_ord - type2_ord + + if type_ord_diff: + return type_ord_diff + + # the same type, + if type1_ord == 0: + return 0 + + return _compare_helper(orderby_item1["item"], orderby_item2["item"]) + + +def _peek_order_by_items(peek_result): + return peek_result["orderByItems"] + + +class _OrderByDocumentProducerComparator(_PartitionKeyRangeDocumentProduerComparator): + """Provide a Comparator for document producers which respects orderby sort order. 
+ """ + + def __init__(self, sort_order): # pylint: disable=super-init-not-called + """Instantiates this class + + :param list sort_order: + List of sort orders (i.e., Ascending, Descending) + + :ivar list sort_order: + List of sort orders (i.e., Ascending, Descending) + + """ + self._sort_order = sort_order + + def compare(self, doc_producer1, doc_producer2): + """Compares the given two instances of DocumentProducers. + + Based on the orderby query items and whether the sort order is Ascending + or Descending compares the peek result of the two DocumentProducers. + + If the peek results are equal based on the sort order, this comparator + compares the target partition key range of the two DocumentProducers. + + :param _DocumentProducer doc_producers1: first instance + :param _DocumentProducer doc_producers2: first instance + :return: + Integer value of compare result. + positive integer if doc_producers1 > doc_producers2 + negative integer if doc_producers1 < doc_producers2 + :rtype: int + """ + + res1 = _peek_order_by_items(doc_producer1.peek()) + res2 = _peek_order_by_items(doc_producer2.peek()) + + self._validate_orderby_items(res1, res2) + + for i, (elt1, elt2) in enumerate(zip(res1, res2)): + res = _OrderByHelper.compare(elt1, elt2) + if res != 0: + if self._sort_order[i] == "Ascending": + return res + if self._sort_order[i] == "Descending": + return -res + + return _PartitionKeyRangeDocumentProduerComparator.compare(self, doc_producer1, doc_producer2) + + def _validate_orderby_items(self, res1, res2): + if len(res1) != len(res2): + # error + raise ValueError("orderByItems cannot have different size") + + if len(res1) != len(self._sort_order): + # error + raise ValueError("orderByItems cannot have a different size than sort orders.") + + for elt1, elt2 in zip(res1, res2): + type1 = _OrderByHelper.getTypeStr(elt1) + type2 = _OrderByHelper.getTypeStr(elt2) + if type1 != type2: + raise ValueError("Expected {}, but got {}.".format(type1, type2)) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/endpoint_component.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/endpoint_component.py new file mode 100644 index 000000000000..fa0df19174c9 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/endpoint_component.py @@ -0,0 +1,199 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+
+"""Internal class for query execution endpoint component implementation in the
+Azure Cosmos database service.
+"""
+import numbers
+import copy
+import hashlib
+import json
+import six
+
+from azure.cosmos._execution_context.aggregators import (
+    _AverageAggregator,
+    _CountAggregator,
+    _MaxAggregator,
+    _MinAggregator,
+    _SumAggregator,
+)
+
+
+class _QueryExecutionEndpointComponent(object):
+    def __init__(self, execution_context):
+        self._execution_context = execution_context
+
+    def __aiter__(self):
+        return self
+
+    async def __anext__(self):
+        # supports the python 3 async iterator protocol
+        return await self._execution_context.__anext__()
+
+
+class _QueryExecutionOrderByEndpointComponent(_QueryExecutionEndpointComponent):
+    """Represents an endpoint in handling an order by query.
+
+    For each processed orderby result it returns 'payload' item of the result.
+    """
+    async def __anext__(self):
+        payload = await self._execution_context.__anext__()
+        return payload["payload"]
+
+
+class _QueryExecutionTopEndpointComponent(_QueryExecutionEndpointComponent):
+    """Represents an endpoint in handling top query.
+
+    It only returns as many results as top arg specified.
+    """
+
+    def __init__(self, execution_context, top_count):
+        super(_QueryExecutionTopEndpointComponent, self).__init__(execution_context)
+        self._top_count = top_count
+
+    async def __anext__(self):
+        if self._top_count > 0:
+            res = await self._execution_context.__anext__()
+            self._top_count -= 1
+            return res
+        raise StopAsyncIteration
+
+
+class _QueryExecutionDistinctOrderedEndpointComponent(_QueryExecutionEndpointComponent):
+    """Represents an endpoint in handling distinct query.
+
+    It returns only those values not already returned.
+    """
+    def __init__(self, execution_context):
+        super(_QueryExecutionDistinctOrderedEndpointComponent, self).__init__(execution_context)
+        self.last_result = None
+
+    async def __anext__(self):
+        res = await self._execution_context.__anext__()
+        while self.last_result == res:
+            res = await self._execution_context.__anext__()
+        self.last_result = res
+        return res
+
+
+class _QueryExecutionDistinctUnorderedEndpointComponent(_QueryExecutionEndpointComponent):
+    """Represents an endpoint in handling distinct query.
+
+    It returns only those values not already returned. 
+ """ + def __init__(self, execution_context): + super(_QueryExecutionDistinctUnorderedEndpointComponent, self).__init__(execution_context) + self.last_result = set() + + def make_hash(self, value): + if isinstance(value, (set, tuple, list)): + return tuple([self.make_hash(v) for v in value]) + if not isinstance(value, dict): + if isinstance(value, numbers.Number): + return float(value) + return value + new_value = copy.deepcopy(value) + for k, v in new_value.items(): + new_value[k] = self.make_hash(v) + + return tuple(frozenset(sorted(new_value.items()))) + + async def __anext__(self): + res = await self._execution_context.__anext__() + + json_repr = json.dumps(self.make_hash(res)) + if six.PY3: + json_repr = json_repr.encode("utf-8") + + hash_object = hashlib.sha1(json_repr) # nosec + hashed_result = hash_object.hexdigest() + + while hashed_result in self.last_result: + res = await self._execution_context.__anext__() + json_repr = json.dumps(self.make_hash(res)) + if six.PY3: + json_repr = json_repr.encode("utf-8") + + hash_object = hashlib.sha1(json_repr) # nosec + hashed_result = hash_object.hexdigest() + self.last_result.add(hashed_result) + return res + + +class _QueryExecutionOffsetEndpointComponent(_QueryExecutionEndpointComponent): + """Represents an endpoint in handling offset query. + + It returns results offset by as many results as offset arg specified. + """ + def __init__(self, execution_context, offset_count): + super(_QueryExecutionOffsetEndpointComponent, self).__init__(execution_context) + self._offset_count = offset_count + + async def __anext__(self): + while self._offset_count > 0: + res = await self._execution_context.__anext__() + if res is not None: + self._offset_count -= 1 + else: + raise StopAsyncIteration + return await self._execution_context.__anext__() + + +class _QueryExecutionAggregateEndpointComponent(_QueryExecutionEndpointComponent): + """Represents an endpoint in handling aggregate query. + + It returns only aggreated values. 
+ """ + + def __init__(self, execution_context, aggregate_operators): + super(_QueryExecutionAggregateEndpointComponent, self).__init__(execution_context) + self._local_aggregators = [] + self._results = None + self._result_index = 0 + for operator in aggregate_operators: + if operator == "Average": + self._local_aggregators.append(_AverageAggregator()) + elif operator == "Count": + self._local_aggregators.append(_CountAggregator()) + elif operator == "Max": + self._local_aggregators.append(_MaxAggregator()) + elif operator == "Min": + self._local_aggregators.append(_MinAggregator()) + elif operator == "Sum": + self._local_aggregators.append(_SumAggregator()) + + async def __anext__(self): + async for res in self._execution_context: + for item in res: #TODO check on this being an async loop + for operator in self._local_aggregators: + if isinstance(item, dict) and item: + operator.aggregate(item["item"]) + elif isinstance(item, numbers.Number): + operator.aggregate(item) + if self._results is None: + self._results = [] + for operator in self._local_aggregators: + self._results.append(operator.get_result()) + if self._result_index < len(self._results): + res = self._results[self._result_index] + self._result_index += 1 + return res + raise StopAsyncIteration diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/execution_dispatcher.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/execution_dispatcher.py index 3eb4341bf6f7..5dcff70d93a5 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/execution_dispatcher.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/execution_dispatcher.py @@ -29,7 +29,7 @@ from azure.cosmos._execution_context.aio import multi_execution_aggregator from azure.cosmos._execution_context.aio.base_execution_context import _QueryExecutionContextBase, _DefaultQueryExecutionContext from azure.cosmos._execution_context.query_execution_info import _PartitionedQueryExecutionInfo -from azure.cosmos._execution_context import endpoint_component +from azure.cosmos._execution_context.aio import endpoint_component from azure.cosmos.documents import _DistinctType from azure.cosmos.http_constants import StatusCodes @@ -92,13 +92,13 @@ async def fetch_next_block(self): query_to_use = self._query if self._query is not None else "Select * from root r" query_execution_info = _PartitionedQueryExecutionInfo(await self._client._GetQueryPlanThroughGateway (query_to_use, self._resource_link)) - self._execution_context = self._create_pipelined_execution_context(query_execution_info) + self._execution_context = await self._create_pipelined_execution_context(query_execution_info) else: raise e return await self._execution_context.fetch_next_block() - def _create_pipelined_execution_context(self, query_execution_info): + async def _create_pipelined_execution_context(self, query_execution_info): assert self._resource_link, "code bug, resource_link is required." 
if query_execution_info.has_aggregates() and not query_execution_info.has_select_value(): @@ -112,6 +112,7 @@ def _create_pipelined_execution_context(self, query_execution_info): self._query, self._options, query_execution_info) + await execution_context_aggregator._configure_partition_ranges() return _PipelineExecutionContext(self._client, self._options, execution_context_aggregator, query_execution_info) @@ -184,8 +185,8 @@ async def fetch_next_block(self): results = [] for _ in xrange(self._page_size): try: - results.append(await self.__anext__) - except StopIteration: + results.append(await self.__anext__()) + except StopAsyncIteration: # no more results break return results \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/multi_execution_aggregator.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/multi_execution_aggregator.py index 12af9cb22162..25d4285c5e0f 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/multi_execution_aggregator.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/multi_execution_aggregator.py @@ -24,7 +24,7 @@ import heapq from azure.cosmos._execution_context.aio.base_execution_context import _QueryExecutionContextBase -from azure.cosmos._execution_context import document_producer +from azure.cosmos._execution_context.aio import document_producer from azure.cosmos._routing import routing_range # pylint: disable=protected-access @@ -78,30 +78,6 @@ def __init__(self, client, resource_link, query, options, partitioned_query_ex_i else: self._document_producer_comparator = document_producer._PartitionKeyRangeDocumentProduerComparator() - # will be a list of (partition_min, partition_max) tuples - targetPartitionRanges = self._get_target_partition_key_range() - - targetPartitionQueryExecutionContextList = [] - for partitionTargetRange in targetPartitionRanges: - # create and add the child execution context for the target range - targetPartitionQueryExecutionContextList.append( - self._createTargetPartitionQueryExecutionContext(partitionTargetRange) - ) - - self._orderByPQ = _MultiExecutionContextAggregator.PriorityQueue() - - for targetQueryExContext in targetPartitionQueryExecutionContextList: - - try: - # TODO: we can also use more_itertools.peekable to be more python friendly - targetQueryExContext.peek() - # if there are matching results in the target ex range add it to the priority queue - - self._orderByPQ.push(targetQueryExContext) - - except StopIteration: - continue - async def __anext__(self): """Returns the next result @@ -112,14 +88,14 @@ async def __anext__(self): if self._orderByPQ.size() > 0: targetRangeExContext = self._orderByPQ.pop() - res = next(targetRangeExContext) + res = await targetRangeExContext.__anext__() try: # TODO: we can also use more_itertools.peekable to be more python friendly - targetRangeExContext.peek() + await targetRangeExContext.peek() self._orderByPQ.push(targetRangeExContext) - except StopIteration: + except StopAsyncIteration: pass return res @@ -157,3 +133,28 @@ async def _get_target_partition_key_range(self): return await self._routing_provider.get_overlapping_ranges( self._resource_link, [routing_range.Range.ParseFromDict(range_as_dict) for range_as_dict in query_ranges] ) + + async def _configure_partition_ranges(self): + # will be a list of (partition_min, partition_max) tuples + targetPartitionRanges = await self._get_target_partition_key_range() + + targetPartitionQueryExecutionContextList = [] + for 
partitionTargetRange in targetPartitionRanges: + # create and add the child execution context for the target range + targetPartitionQueryExecutionContextList.append( + self._createTargetPartitionQueryExecutionContext(partitionTargetRange) + ) + + self._orderByPQ = _MultiExecutionContextAggregator.PriorityQueue() + + for targetQueryExContext in targetPartitionQueryExecutionContextList: + + try: + # TODO: we can also use more_itertools.peekable to be more python friendly + await targetQueryExContext.peek() + # if there are matching results in the target ex range add it to the priority queue + + self._orderByPQ.push(targetQueryExContext) + + except StopAsyncIteration: + continue diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py index 7875ce70fa67..4176451c6532 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py @@ -41,12 +41,14 @@ class _QueryExecutionEndpointComponent(object): def __init__(self, execution_context): self._execution_context = execution_context - async def __aiter__(self): + def __iter__(self): return self - async def __anext__(self): + def __next__(self): # supports python 3 iterator - return await self._execution_context.__anext__() + return next(self._execution_context) + + next = __next__ # Python 2 compatibility. class _QueryExecutionOrderByEndpointComponent(_QueryExecutionEndpointComponent): @@ -54,8 +56,10 @@ class _QueryExecutionOrderByEndpointComponent(_QueryExecutionEndpointComponent): For each processed orderby result it returns 'payload' item of the result. """ - async def __anext__(self): - return await self._execution_context.__anext__()["payload"] + def __next__(self): + return next(self._execution_context)["payload"] + + next = __next__ # Python 2 compatibility. class _QueryExecutionTopEndpointComponent(_QueryExecutionEndpointComponent): @@ -68,12 +72,14 @@ def __init__(self, execution_context, top_count): super(_QueryExecutionTopEndpointComponent, self).__init__(execution_context) self._top_count = top_count - async def __anext__(self): + def __next__(self): if self._top_count > 0: - res = await self._execution_context.__anext__() + res = next(self._execution_context) self._top_count -= 1 return res - raise StopAsyncIteration + raise StopIteration + + next = __next__ # Python 2 compatibility. class _QueryExecutionDistinctOrderedEndpointComponent(_QueryExecutionEndpointComponent): @@ -85,13 +91,15 @@ def __init__(self, execution_context): super(_QueryExecutionDistinctOrderedEndpointComponent, self).__init__(execution_context) self.last_result = None - async def __anext__(self): - res = await self._execution_context.__anext__() + def __next__(self): + res = next(self._execution_context) while self.last_result == res: - res = await self._execution_context.__anext__() + res = next(self._execution_context) self.last_result = res return res + next = __next__ # Python 2 compatibility. + class _QueryExecutionDistinctUnorderedEndpointComponent(_QueryExecutionEndpointComponent): """Represents an endpoint in handling distinct query. 
@@ -115,8 +123,8 @@ def make_hash(self, value): return tuple(frozenset(sorted(new_value.items()))) - async def __anext__(self): - res = await self._execution_context.__anext__() + def __next__(self): + res = next(self._execution_context) json_repr = json.dumps(self.make_hash(res)) if six.PY3: @@ -126,7 +134,7 @@ async def __anext__(self): hashed_result = hash_object.hexdigest() while hashed_result in self.last_result: - res = await self._execution_context.__anext__() + res = next(self._execution_context) json_repr = json.dumps(self.make_hash(res)) if six.PY3: json_repr = json_repr.encode("utf-8") @@ -136,6 +144,8 @@ async def __anext__(self): self.last_result.add(hashed_result) return res + next = __next__ # Python 2 compatibility. + class _QueryExecutionOffsetEndpointComponent(_QueryExecutionEndpointComponent): """Represents an endpoint in handling offset query. @@ -146,14 +156,16 @@ def __init__(self, execution_context, offset_count): super(_QueryExecutionOffsetEndpointComponent, self).__init__(execution_context) self._offset_count = offset_count - async def __anext__(self): + def __next__(self): while self._offset_count > 0: - res = await self._execution_context.__anext__() + res = next(self._execution_context) if res is not None: self._offset_count -= 1 else: - raise StopAsyncIteration - return await self._execution_context.__anext__() + raise StopIteration + return next(self._execution_context) + + next = __next__ # Python 2 compatibility. class _QueryExecutionAggregateEndpointComponent(_QueryExecutionEndpointComponent): @@ -179,9 +191,9 @@ def __init__(self, execution_context, aggregate_operators): elif operator == "Sum": self._local_aggregators.append(_SumAggregator()) - async def __anext__(self): - async for res in self._execution_context: - for item in res: #TODO check on this being an async loop + def __next__(self): + for res in self._execution_context: + for item in res: for operator in self._local_aggregators: if isinstance(item, dict) and item: operator.aggregate(item["item"]) @@ -195,4 +207,6 @@ async def __anext__(self): res = self._results[self._result_index] self._result_index += 1 return res - raise StopAsyncIteration + raise StopIteration + + next = __next__ # Python 2 compatibility. 
\ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/aio/routing_map_provider.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/aio/routing_map_provider.py index 474b26e4550b..77fc8ed2f2ad 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/aio/routing_map_provider.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/aio/routing_map_provider.py @@ -131,7 +131,7 @@ class SmartRoutingMapProvider(PartitionKeyRangeCache): invocation of CollectionRoutingMap.get_overlapping_ranges() """ - def get_overlapping_ranges(self, collection_link, partition_key_ranges): + async def get_overlapping_ranges(self, collection_link, partition_key_ranges): """ Given the sorted ranges and a collection, Returns the list of overlapping partition key ranges @@ -165,7 +165,7 @@ def get_overlapping_ranges(self, collection_link, partition_key_ranges): else: queryRange = currentProvidedRange - overlappingRanges = PartitionKeyRangeCache.get_overlapping_ranges(self, collection_link, queryRange) + overlappingRanges = await PartitionKeyRangeCache.get_overlapping_ranges(self, collection_link, queryRange) assert overlappingRanges, "code bug: returned overlapping ranges for queryRange {} is empty".format( queryRange ) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py index 5a86a5770c92..117b3ee8cbaa 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py @@ -2106,6 +2106,37 @@ async def fetch_fn(options): self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable ) + async def QueryFeed(self, path, collection_id, query, options, partition_key_range_id=None, **kwargs): + """Query Feed for Document Collection resource. + + :param str path: + Path to the document collection. + :param str collection_id: + Id of the document collection. + :param (str or dict) query: + :param dict options: + The request options for the request. + :param str partition_key_range_id: + Partition key range id. 
+ :rtype: + tuple + + """ + return ( + await self.__QueryFeed( + path, + "docs", + collection_id, + lambda r: r["Documents"], + lambda _, b: b, + query, + options, + partition_key_range_id, + **kwargs + ), + self.last_response_headers, + ) + async def __QueryFeed( self, path, diff --git a/sdk/cosmos/azure-cosmos/samples/index_management_async.py b/sdk/cosmos/azure-cosmos/samples/index_management_async.py index 13bace9b98a3..23e0d3cadbaf 100644 --- a/sdk/cosmos/azure-cosmos/samples/index_management_async.py +++ b/sdk/cosmos/azure-cosmos/samples/index_management_async.py @@ -1,6 +1,3 @@ -import sys -sys.path.append(r"C:\Users\simonmoreno\Repos\azure-sdk-for-python\sdk\cosmos\azure-cosmos") - import azure.cosmos.documents as documents import azure.cosmos.aio.cosmos_client as cosmos_client import azure.cosmos.exceptions as exceptions @@ -248,7 +245,7 @@ async def use_manual_indexing(db): await query_documents_with_custom_query(created_Container, query) # Cleanup - db.delete_container(created_Container) + await db.delete_container(created_Container) print("\n") except exceptions.CosmosResourceExistsError: print("Entity already exists") @@ -579,8 +576,8 @@ async def perform_multi_orderby_query(db): indexing_policy=indexing_policy, partition_key=PARTITION_KEY ) - properties = await created_container.read() print(created_container) + properties = await created_container.read() print("\n" + "-" * 25 + "\n8. Container created with index policy") print_dictionary_items(properties["indexingPolicy"]) @@ -657,6 +654,7 @@ async def run_sample(): # 8. Perform Multi Orderby queries using composite indexes await perform_multi_orderby_query(created_db) + await client.delete_database(DATABASE_ID) await client.close() except exceptions.AzureError as e: diff --git a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py index 19a4040437b5..c46a2aace04d 100644 --- a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py +++ b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py @@ -10,11 +10,8 @@ import azure.cosmos.exceptions as exceptions from azure.cosmos.partition_key import PartitionKey -endpoint = 'https://simonmoreno-sql3.documents.azure.com:443/' -key = 'Bl5eNK9aXEtvqrrPJPLs2gvCdllIZr2Dvm7dmsiVfgrznrlO2CHxSVODgy1ROxr33heMMTEVMp1eEuBFW6jHgw==' - -emulator_link = 'https://localhost:8081' -emulator_key = 'C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==' +endpoint = '' +key = '' import uuid From b597ca8c38c9ac95b8ea43e028380726cf6a05c5 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Fri, 22 Oct 2021 11:26:31 -0400 Subject: [PATCH 22/56] small fix to query_items() also fixed README and added examples_async --- sdk/cosmos/azure-cosmos/README.md | 1 - .../azure/cosmos/aio/container.py | 24 +- ...access_cosmos_with_resource_token_async.py | 2 +- .../samples/document_management_async.py | 14 +- .../azure-cosmos/samples/examples_async.py | 148 +++++++++ .../samples/index_management_async.py | 6 +- ...npartitioned_container_operations_async.py | 283 ++++++++++++++++++ .../azure-cosmos/samples/simon_testfile.py | 3 +- 8 files changed, 457 insertions(+), 24 deletions(-) create mode 100644 sdk/cosmos/azure-cosmos/samples/examples_async.py create mode 100644 sdk/cosmos/azure-cosmos/samples/nonpartitioned_container_operations_async.py diff --git a/sdk/cosmos/azure-cosmos/README.md b/sdk/cosmos/azure-cosmos/README.md index a6b16d8c7852..4d2682f6e689 100644 --- a/sdk/cosmos/azure-cosmos/README.md +++ b/sdk/cosmos/azure-cosmos/README.md @@ -97,7 
+97,6 @@ Currently the features below are **not supported**. For alternatives options, ch ### Data Plane Limitations: * Group By queries -* Language Native async i/o (NO LONGER A LIMITATION) * Queries with COUNT from a DISTINCT subquery: SELECT COUNT (1) FROM (SELECT DISTINCT C.ID FROM C) * Bulk/Transactional batch processing * Direct TCP Mode access diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py index 3c16b38ca01d..14d66ba7e69d 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py @@ -105,9 +105,9 @@ def _get_conflict_link(self, conflict_or_link): return u"{}/conflicts/{}".format(self.container_link, conflict_or_link) return conflict_or_link["_self"] - def _set_partition_key(self, partition_key): + async def _set_partition_key(self, partition_key): if partition_key == NonePartitionKeyValue: - return CosmosClientConnection._return_undefined_or_empty_partition_key(self.is_system_key) #might have to await here + return CosmosClientConnection._return_undefined_or_empty_partition_key(await self.is_system_key) return partition_key @distributed_trace_async @@ -241,7 +241,7 @@ async def read_item( request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if partition_key is not None: - request_options["partitionKey"] = self._set_partition_key(partition_key) + request_options["partitionKey"] = await self._set_partition_key(partition_key) if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics if post_trigger_include is not None: @@ -287,8 +287,8 @@ def read_all_items( response_hook(self.client_connection.last_response_headers, items) return items - @distributed_trace - def query_items( + @distributed_trace_async + async def query_items( self, query, # type: str parameters=None, # type: Optional[List[Dict[str, object]]] @@ -352,7 +352,7 @@ def query_items( if populate_query_metrics is not None: feed_options["populateQueryMetrics"] = populate_query_metrics if partition_key is not None: - feed_options["partitionKey"] = self._set_partition_key(partition_key) + feed_options["partitionKey"] = await self._set_partition_key(partition_key) if enable_scan_in_query is not None: feed_options["enableScanInQuery"] = enable_scan_in_query @@ -549,7 +549,7 @@ async def delete_item( request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if partition_key is not None: - request_options["partitionKey"] = self._set_partition_key(partition_key) + request_options["partitionKey"] = await self._set_partition_key(partition_key) if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics if pre_trigger_include is not None: @@ -650,8 +650,8 @@ def list_conflicts(self, max_item_count=None, **kwargs): response_hook(self.client_connection.last_response_headers, result) return result - @distributed_trace - def query_conflicts( + @distributed_trace_async + async def query_conflicts( self, query, # type: str parameters=None, # type: Optional[List[str]] @@ -681,7 +681,7 @@ def query_conflicts( if enable_cross_partition_query is not None: feed_options["enableCrossPartitionQuery"] = enable_cross_partition_query if partition_key is not None: - feed_options["partitionKey"] = self._set_partition_key(partition_key) + feed_options["partitionKey"] = await self._set_partition_key(partition_key) result = 
self.client_connection.QueryConflicts( collection_link=self.container_link, @@ -708,7 +708,7 @@ async def get_conflict(self, conflict, partition_key, **kwargs): request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if partition_key is not None: - request_options["partitionKey"] = self._set_partition_key(partition_key) + request_options["partitionKey"] = await self._set_partition_key(partition_key) result = await self.client_connection.ReadConflict( conflict_link=self._get_conflict_link(conflict), options=request_options, **kwargs @@ -734,7 +734,7 @@ async def delete_conflict(self, conflict, partition_key, **kwargs): request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if partition_key is not None: - request_options["partitionKey"] = self._set_partition_key(partition_key) + request_options["partitionKey"] = await self._set_partition_key(partition_key) result = await self.client_connection.DeleteConflict( conflict_link=self._get_conflict_link(conflict), options=request_options, **kwargs diff --git a/sdk/cosmos/azure-cosmos/samples/access_cosmos_with_resource_token_async.py b/sdk/cosmos/azure-cosmos/samples/access_cosmos_with_resource_token_async.py index bd3d0802a369..c77107f02ec4 100644 --- a/sdk/cosmos/azure-cosmos/samples/access_cosmos_with_resource_token_async.py +++ b/sdk/cosmos/azure-cosmos/samples/access_cosmos_with_resource_token_async.py @@ -101,7 +101,7 @@ async def token_client_delete(container, username, item_id): async def token_client_query(container, username): try: - async for item in container.query_items( + async for item in await container.query_items( query="SELECT * FROM my_container c WHERE c.username=@username", parameters=[{"name": "@username", "value": username}], partition_key=username, diff --git a/sdk/cosmos/azure-cosmos/samples/document_management_async.py b/sdk/cosmos/azure-cosmos/samples/document_management_async.py index 523af364e4c6..75511e74098c 100644 --- a/sdk/cosmos/azure-cosmos/samples/document_management_async.py +++ b/sdk/cosmos/azure-cosmos/samples/document_management_async.py @@ -58,10 +58,11 @@ async def read_items(container): # result in a 429 (throttled request) read_all_items_response = container.read_all_items(max_item_count=10) - # Because the asynchronous client returns an asynchronous iterator object for methods that use - # return several items using queries, we do not need to await the function. However, attempting - # to cast this object into a list directly will throw an error; instead, iterate over the items - # using an async for loop like shown here and in the query_items() method below + # The asynchronous client returns an asynchronous iterator object for methods that + # return several items, so attempting to cast this object into a list directly will + # throw an error; instead, iterate over the items using an async for loop like shown + # here and in the query_items() method below. 
We also do not await read_all() because + # it doesn't deal with partition key logic the way query_items() does item_list = [item async for item in read_all_items_response] print('Found {0} items'.format(item_list.__len__())) @@ -79,7 +80,10 @@ async def query_items(container, doc_id): print('\n1.4 Querying for an Item by Id\n') # enable_cross_partition_query should be set to True as the container is partitioned - query_items_response = container.query_items( + # In this case, we do have to await the asynchronous iterator object since logic + # within the query_items() method makes network calls to verify the partition key + # deifnition in the container + query_items_response = await container.query_items( query="SELECT * FROM r WHERE r.id=@id", parameters=[ { "name":"@id", "value": doc_id } diff --git a/sdk/cosmos/azure-cosmos/samples/examples_async.py b/sdk/cosmos/azure-cosmos/samples/examples_async.py new file mode 100644 index 000000000000..1bd9a4474be8 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/samples/examples_async.py @@ -0,0 +1,148 @@ +from azure.cosmos import exceptions, CosmosClient, PartitionKey +from azure.cosmos.aio import CosmosClient + +import os + +url = os.environ["ACCOUNT_URI"] +key = os.environ["ACCOUNT_KEY"] + +async def examples_async(): + # All interaction with Cosmos DB starts with an instance of the CosmosClient + # In order to use the asynchronous client, we need to use async/await keywords, + # which can only be used within async methods like examples_async() here + + # [START create_client] + client = CosmosClient(url, key) + # [END create_client] + + # Create a database in the account using the CosmosClient, + # specifying that the operation shouldn't throw an exception + # if a database with the given ID already exists. + # [START create_database] + database_name = "testDatabase" + try: + database = await client.create_database(id=database_name) + except exceptions.CosmosResourceExistsError: + database = client.get_database_client(database=database_name) + # [END create_database] + + # Create a container, handling the exception if a container with the + # same ID (name) already exists in the database. + # [START create_container] + container_name = "products" + try: + container = await database.create_container( + id=container_name, partition_key=PartitionKey(path="/productName") + ) + except exceptions.CosmosResourceExistsError: + container = database.get_container_client(container_name) + # [END create_container] + + # Create a container with custom settings. This example + # creates a container with a custom partition key. + # [START create_container_with_settings] + customer_container_name = "customers" + try: + customer_container = await database.create_container( + id=customer_container_name, + partition_key=PartitionKey(path="/city"), + default_ttl=200, + ) + except exceptions.CosmosResourceExistsError: + customer_container = database.get_container_client(customer_container_name) + # [END create_container_with_settings] + + # Retrieve a container by walking down the resource hierarchy + # (client->database->container), handling the exception generated + # if no container with the specified ID was found in the database. 
+ # [START get_container] + database = client.get_database_client(database_name) + container = database.get_container_client(container_name) + # [END get_container] + + # [START list_containers] + database = client.get_database_client(database_name) + for container in database.list_containers(): + print("Container ID: {}".format(container['id'])) + # [END list_containers] + + # Insert new items by defining a dict and calling Container.upsert_item + # [START upsert_items] + container = database.get_container_client(container_name) + for i in range(1, 10): + await container.upsert_item( + dict(id="item{}".format(i), productName="Widget", productModel="Model {}".format(i)) + ) + # [END upsert_items] + + # Modify an existing item in the container + # [START update_item] + item = await container.read_item("item2", partition_key="Widget") + item["productModel"] = "DISCONTINUED" + updated_item = await container.upsert_item(item) + # [END update_item] + + # Query the items in a container using SQL-like syntax. This example + # gets all items whose product model hasn't been discontinued. + # The asynchronous client returns asynchronous iterators for its query methods; + # as such, we iterate over it by using an async for loop + # [START query_items] + import json + + async for item in await container.query_items( + query='SELECT * FROM products p WHERE p.productModel <> "DISCONTINUED"', + enable_cross_partition_query=True, + ): + print(json.dumps(item, indent=True)) + # [END query_items] + + # Parameterized queries are also supported. This example + # gets all items whose product model has been discontinued. + # [START query_items_param] + discontinued_items = await container.query_items( + query='SELECT * FROM products p WHERE p.productModel = @model AND p.productName="Widget"', + parameters=[dict(name="@model", value="DISCONTINUED")], + ) + async for item in discontinued_items: + print(json.dumps(item, indent=True)) + # [END query_items_param] + + # Delete items from the container. + # The Cosmos DB SQL API does not support 'DELETE' queries, + # so deletes must be done with the delete_item method + # on the container. + # [START delete_items] + async for item in await container.query_items( + query='SELECT * FROM products p WHERE p.productModel = "DISCONTINUED" AND p.productName="Widget"' + ): + await container.delete_item(item, partition_key="Widget") + # [END delete_items] + + # Retrieve the properties of a database + # [START get_database_properties] + properties = await database.read() + print(json.dumps(properties, indent=True)) + # [END get_database_properties] + + # Modify the properties of an existing container + # This example sets the default time to live (TTL) for items in the + # container to 3600 seconds (1 hour). An item in container is deleted + # when the TTL has elapsed since it was last edited. + # [START reset_container_properties] + # Set the TTL on the container to 3600 seconds (one hour) + await database.replace_container(container, partition_key=PartitionKey(path='/productName'), default_ttl=3600) + + # Display the new TTL setting for the container + container_props = await database.get_container_client(container_name).read() + print("New container TTL: {}".format(json.dumps(container_props['defaultTtl']))) + # [END reset_container_properties] + + # Create a user in the database. 
+ # [START create_user] + try: + await database.create_user(dict(id="Walter Harp")) + except exceptions.CosmosResourceExistsError: + print("A user with that ID already exists.") + except exceptions.CosmosHttpResponseError as failure: + print("Failed to create user. Status code:{}".format(failure.status_code)) + # [END create_user] diff --git a/sdk/cosmos/azure-cosmos/samples/index_management_async.py b/sdk/cosmos/azure-cosmos/samples/index_management_async.py index 23e0d3cadbaf..59ee642634a8 100644 --- a/sdk/cosmos/azure-cosmos/samples/index_management_async.py +++ b/sdk/cosmos/azure-cosmos/samples/index_management_async.py @@ -73,7 +73,7 @@ async def query_entities(parent, entity_type, id = None): if id == None: entities = [entity async for entity in parent.read_all_items()] else: - entities = [entity async for entity in parent.query_items(find_entity_by_id_query)] + entities = [entity async for entity in await parent.query_items(find_entity_by_id_query)] except exceptions.AzureError as e: print("The following error occured while querying for the entity / entities ", entity_type, id if id != None else "") print(e) @@ -114,7 +114,7 @@ async def fetch_all_databases(client): async def query_documents_with_custom_query(container, query_with_optional_parameters, message = "Document(s) found by query: "): try: - results = container.query_items(query_with_optional_parameters, enable_cross_partition_query=True) + results = await container.query_items(query_with_optional_parameters, enable_cross_partition_query=True) print(message) async for doc in results: print(doc) @@ -370,7 +370,7 @@ async def range_scan_on_hash_index(db): # Now add IndexingDirective and repeat query # expect 200 OK because now we are explicitly allowing scans in a query # using the enableScanInQuery directive - results = created_Container.query_items( + results = await created_Container.query_items( query, enable_scan_in_query=True, enable_cross_partition_query=True diff --git a/sdk/cosmos/azure-cosmos/samples/nonpartitioned_container_operations_async.py b/sdk/cosmos/azure-cosmos/samples/nonpartitioned_container_operations_async.py new file mode 100644 index 000000000000..973c234268d2 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/samples/nonpartitioned_container_operations_async.py @@ -0,0 +1,283 @@ +import azure.cosmos.aio.cosmos_client as cosmos_client +import azure.cosmos.exceptions as exceptions +import requests +import six +import json +from six.moves.urllib.parse import quote as urllib_quote +import azure.cosmos.auth as auth +import azure.cosmos.partition_key as partition_key +import datetime + +import asyncio +import config + +# ---------------------------------------------------------------------------------------------------------- +# Prerequistes - +# +# 1. An Azure Cosmos account - +# https:#azure.microsoft.com/en-us/documentation/articles/documentdb-create-account/ +# +# 2. 
Microsoft Azure Cosmos PyPi package - +# https://pypi.python.org/pypi/azure-cosmos/ +# ---------------------------------------------------------------------------------------------------------- +# Sample - demonstrates the basic CRUD operations on a Item resource in a non partitioned container +# ---------------------------------------------------------------------------------------------------------- + +HOST = config.settings['host'] +MASTER_KEY = config.settings['master_key'] +DATABASE_ID = config.settings['database_id'] +CONTAINER_ID = config.settings['container_id'] + +def create_nonpartitioned_container(db): + # Create a non partitioned container using the rest API and older version + client = requests.Session() + base_url_split = HOST.split(":") + resource_url = base_url_split[0] + ":" + base_url_split[1] + ":" + base_url_split[2].split("/")[ + 0] + "//dbs/" + db.id + "/colls/" + verb = "post" + resource_id_or_fullname = "dbs/" + db.id + resource_type = "colls" + data = '{"id":"mycoll"}' + + headers = {} + headers["x-ms-version"] = "2018-09-17" + headers["x-ms-date"] = (datetime.datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')) + headers['authorization'] = get_authorization(db.client_connection, verb, + resource_id_or_fullname, resource_type, headers) + response = client.request(verb, + resource_url, + data=data, + headers=headers, + timeout=60, + stream=False, + verify=False) + + data = response.content + if not six.PY2: + # python 3 compatible: convert data from byte to unicode string + data = data.decode('utf-8') + data = json.loads(data) + created_container = db.get_container_client("mycoll") + + # Create a document in the non partitioned container using the rest API and older version + resource_url = base_url_split[0] + ":" + base_url_split[1] + ":" + base_url_split[2].split("/")[0] \ + + "//dbs/" + db.id + "/colls/" + created_container.id + "/docs/" + resource_id_or_fullname = "dbs/" + db.id + "/colls/" + created_container.id + resource_type = "docs" + data = json.dumps(get_sales_order('SalesOrder0')) + + headers['authorization'] = get_authorization(db.client_connection, verb, + resource_id_or_fullname, resource_type, headers) + response = client.request(verb, + resource_url, + data=data, + headers=headers, + timeout=60, + stream=False, + verify=False) + + data = response.content + if not six.PY2: + # python 3 compatible: convert data from byte to unicode string + data = data.decode('utf-8') + data = json.loads(data) + return created_container, "SalesOrder0" + + +def get_authorization(client, verb, resource_id_or_fullname, resource_type, headers): + authorization = auth.GetAuthorizationHeader( + cosmos_client_connection=client, + verb=verb, + path='', + resource_id_or_fullname=resource_id_or_fullname, + is_name_based=True, + resource_type=resource_type, + headers=headers) + + # urllib.quote throws when the input parameter is None + if authorization: + # -_.!~*'() are valid characters in url, and shouldn't be quoted. + authorization = urllib_quote(authorization, '-_.!~*\'()') + + return authorization + + +async def create_items(container): + print('Creating Items') + print('\n1.1 Create Item\n') + + # Create a SalesOrder object. This object has nested properties and various types including numbers, DateTimes and strings. + # This can be saved as JSON as is without converting into rows/columns. + sales_order = get_sales_order("SalesOrder1") + await container.create_item(body=sales_order) + + # As your app evolves, let's say your object has a new schema. 
You can insert SalesOrderV2 objects without any + # changes to the database tier. + sales_order2 = get_sales_order_v2("SalesOrder2") + await container.create_item(body=sales_order2) + + +async def read_item(container, doc_id): + print('\n1.2 Reading Item by Id\n') + + # Note that Reads require a partition key to be spcified. + response = await container.read_item(doc_id, partition_key=partition_key.NonePartitionKeyValue) + + print('Item read by Id {0}'.format(doc_id)) + print('Account Number: {0}'.format(response.get('account_number'))) + print('Subtotal: {0}'.format(response.get('subtotal'))) + + +async def read_items(container): + print('\n1.3 - Reading all items in a container\n') + + # NOTE: Use MaxItemCount on Options to control how many items come back per trip to the server + # Important to handle throttles whenever you are doing operations such as this that might + # result in a 429 (throttled request) + item_list = [item async for item in container.read_all_items(max_item_count=10)] + + print('Found {0} items'.format(item_list.__len__())) + + for doc in item_list: + print('Item Id: {0}'.format(doc.get('id'))) + + +async def query_items(container, doc_id): + print('\n1.4 Querying for an Item by Id\n') + + # enable_cross_partition_query should be set to True as the container is partitioned + items = [item async for item in await container.query_items( + query="SELECT * FROM r WHERE r.id=@id", + parameters=[ + {"name": "@id", "value": doc_id} + ], + enable_cross_partition_query=True + )] + + print('Item queried by Id {0}'.format(items[0].get("id"))) + + +async def replace_item(container, doc_id): + print('\n1.5 Replace an Item\n') + + read_item = await container.read_item(doc_id, partition_key=partition_key.NonePartitionKeyValue) + read_item['subtotal'] = read_item['subtotal'] + 1 + response = await container.replace_item(item=read_item, body=read_item) + + print('Replaced Item\'s Id is {0}, new subtotal={1}'.format(response['id'], response['subtotal'])) + + +async def upsert_item(container, doc_id): + print('\n1.6 Upserting an item\n') + + read_item = await container.read_item(doc_id, partition_key=partition_key.NonePartitionKeyValue) + read_item['subtotal'] = read_item['subtotal'] + 1 + response = await container.upsert_item(body=read_item) + + print('Upserted Item\'s Id is {0}, new subtotal={1}'.format(response['id'], response['subtotal'])) + + +async def delete_item(container, doc_id): + print('\n1.7 Deleting Item by Id\n') + + await container.delete_item(item=doc_id, partition_key=partition_key.NonePartitionKeyValue) + + print('Deleted item\'s Id is {0}'.format(doc_id)) + + +def get_sales_order(item_id): + order1 = {'id': item_id, + 'account_number': 'Account1', + 'purchase_order_number': 'PO18009186470', + 'order_date': datetime.date(2005, 1, 10).strftime('%c'), + 'subtotal': 419.4589, + 'tax_amount': 12.5838, + 'freight': 472.3108, + 'total_due': 985.018, + 'items': [ + {'order_qty': 1, + 'product_id': 100, + 'unit_price': 418.4589, + 'line_price': 418.4589 + } + ], + 'ttl': 60 * 60 * 24 * 30 + } + + return order1 + + +def get_sales_order_v2(item_id): + # notice new fields have been added to the sales order + order2 = {'id': item_id, + 'account_number': 'Account2', + 'purchase_order_number': 'PO15428132599', + 'order_date': datetime.date(2005, 7, 11).strftime('%c'), + 'due_date': datetime.date(2005, 7, 21).strftime('%c'), + 'shipped_date': datetime.date(2005, 7, 15).strftime('%c'), + 'subtotal': 6107.0820, + 'tax_amount': 586.1203, + 'freight': 183.1626, + 'discount_amt': 
1982.872, + 'total_due': 4893.3929, + 'items': [ + {'order_qty': 3, + 'product_code': 'A-123', # notice how in item details we no longer reference a ProductId + 'product_name': 'Product 1', # instead we have decided to denormalise our schema and include + 'currency_symbol': '$', # the Product details relevant to the Order on to the Order directly + 'currecny_code': 'USD', + # this is a typical refactor that happens in the course of an application + 'unit_price': 17.1, + # that would have previously required schema changes and data migrations etc. + 'line_price': 5.7 + } + ], + 'ttl': 60 * 60 * 24 * 30 + } + + return order2 + + +async def run_sample(): + client = cosmos_client.CosmosClient(HOST, MASTER_KEY) + try: + # setup database for this sample + try: + db = await client.create_database(id=DATABASE_ID) + except exceptions.CosmosResourceExistsError: + db = await client.get_database_client(DATABASE_ID) + + # setup container for this sample + try: + container, document = create_nonpartitioned_container(db) + print('Container with id \'{0}\' created'.format(CONTAINER_ID)) + + except exceptions.CosmosResourceExistsError: + print('Container with id \'{0}\' was found'.format(CONTAINER_ID)) + + # Read Item created in non partitioned container using older API version + await read_item(container, document) + await create_items(container) + await read_items(container) + await query_items(container, 'SalesOrder1') + await replace_item(container, 'SalesOrder1') + await upsert_item(container, 'SalesOrder1') + await delete_item(container, 'SalesOrder1') + + # cleanup database after sample + try: + await client.delete_database(db) + except exceptions.CosmosResourceNotFoundError: + pass + + except exceptions.CosmosHttpResponseError as e: + print('\nrun_sample has caught an error. 
{0}'.format(e.message)) + + finally: + await client.close() + print("\nrun_sample done") + + +if __name__ == '__main__': + loop = asyncio.get_event_loop() + loop.run_until_complete(run_sample()) diff --git a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py index c46a2aace04d..30121971712c 100644 --- a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py +++ b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py @@ -2,7 +2,6 @@ from time import time sys.path.append(r"C:\Users\simonmoreno\Repos\azure-sdk-for-python\sdk\cosmos\azure-cosmos") - import asyncio import time from azure.cosmos.aio.cosmos_client import CosmosClient as AsyncClient @@ -251,7 +250,7 @@ async def qta(): print("attempting query") query = "SELECT * FROM c" - items = cont.query_items( + items = await cont.query_items( query=query, parameters=[{"name":"@id", "value": itemId}], enable_cross_partition_query=True) From 18319df4cb94e298c05cccff2995311baa3706a3 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Fri, 22 Oct 2021 12:08:41 -0400 Subject: [PATCH 23/56] Update _cosmos_client_connection_async.py --- .../aio/_cosmos_client_connection_async.py | 26 ++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py index 117b3ee8cbaa..85ae94186dd7 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py @@ -204,6 +204,31 @@ def __init__( # Routing map provider self._routing_map_provider = routing_map_provider.SmartRoutingMapProvider(self) + @property + def Session(self): + """Gets the session object from the client. """ + return self.session + + @Session.setter + def Session(self, session): + """Sets a session object on the document client. + + This will override the existing session + """ + self.session = session + + @property + def WriteEndpoint(self): + """Gets the curent write endpoint for a geo-replicated database account. + """ + return self._global_endpoint_manager.get_write_endpoint() + + @property + def ReadEndpoint(self): + """Gets the curent read endpoint for a geo-replicated database account. + """ + return self._global_endpoint_manager.get_read_endpoint() + async def _setup(self): if not 'database_account' in self._setup_kwargs: self._setup_kwargs['database_account'] = await self._global_endpoint_manager._GetDatabaseAccount(**self._setup_kwargs) @@ -1275,7 +1300,6 @@ async def DeleteContainer(self, collection_link, options=None, **kwargs): collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) return await self.DeleteResource(path, "colls", collection_id, None, options, **kwargs) - async def DeleteItem(self, document_link, options=None, **kwargs): """Deletes a document. 
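A rough usage sketch for the Session, WriteEndpoint and ReadEndpoint properties added by the commit above; it assumes the async CosmosClient exposes its underlying connection as `client_connection` the same way the synchronous client does, and uses empty placeholder credentials:

    import asyncio
    from azure.cosmos.aio.cosmos_client import CosmosClient as AsyncClient

    endpoint = ''  # placeholder account URI
    key = ''       # placeholder account key

    async def show_topology():
        async with AsyncClient(endpoint, key) as client:
            # Issue at least one request so the global endpoint manager has
            # resolved the account's read/write regions.
            [db async for db in client.list_databases()]
            conn = client.client_connection  # assumed to mirror the sync client
            print("Write endpoint:", conn.WriteEndpoint)
            print("Read endpoint:", conn.ReadEndpoint)
            print("Session:", conn.Session)

    if __name__ == '__main__':
        asyncio.get_event_loop().run_until_complete(show_topology())
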
From 162c44de40f84306a1b3ce5ff262cba886253145 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Fri, 22 Oct 2021 12:20:10 -0400 Subject: [PATCH 24/56] Update _cosmos_client_connection.py --- .../azure-cosmos/azure/cosmos/_cosmos_client_connection.py | 1 - 1 file changed, 1 deletion(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py index 5095be182ecb..fabe08cf9161 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py @@ -2480,7 +2480,6 @@ def __CheckAndUnifyQueryFormat(self, query_body): @staticmethod def __ValidateResource(resource): - print(resource) id_ = resource.get("id") if id_: try: From ebbac51317c32eaf661f094b829ac10c707d0952 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Fri, 22 Oct 2021 12:33:48 -0400 Subject: [PATCH 25/56] documentation update --- sdk/cosmos/azure-cosmos/azure/cosmos/database.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/database.py b/sdk/cosmos/azure-cosmos/azure/cosmos/database.py index bd0798128770..85a40106bd72 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/database.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/database.py @@ -341,6 +341,7 @@ def get_container_client(self, container): :param container: The ID (name) of the container, a :class:`ContainerProxy` instance, or a dict representing the properties of the container to be retrieved. + :returns: A `ContainerProxy` instance representing the retrieved database. :rtype: ~azure.cosmos.ContainerProxy .. admonition:: Example: @@ -571,7 +572,6 @@ def get_user_client(self, user): :param user: The ID (name), dict representing the properties or :class:`UserProxy` instance of the user to be retrieved. :returns: A `UserProxy` instance representing the retrieved user. - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given user couldn't be retrieved. 
:rtype: ~azure.cosmos.UserProxy """ if isinstance(user, UserProxy): From 470aa5b93108b0f219aef5bbf90d8d3a182db544 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Fri, 22 Oct 2021 13:58:39 -0400 Subject: [PATCH 26/56] updated MIT dates and get_user_client() description --- .../azure/cosmos/_execution_context/aio/__init__.py | 2 +- .../cosmos/_execution_context/aio/base_execution_context.py | 2 +- .../azure/cosmos/_execution_context/aio/document_producer.py | 2 +- .../azure/cosmos/_execution_context/aio/endpoint_component.py | 2 +- .../cosmos/_execution_context/aio/execution_dispatcher.py | 2 +- .../_execution_context/aio/multi_execution_aggregator.py | 2 +- sdk/cosmos/azure-cosmos/azure/cosmos/_routing/aio/__init__.py | 2 +- .../azure/cosmos/_routing/aio/routing_map_provider.py | 2 +- sdk/cosmos/azure-cosmos/azure/cosmos/aio/__init__.py | 2 +- .../azure-cosmos/azure/cosmos/aio/_asynchronous_request.py | 2 +- .../azure/cosmos/aio/_cosmos_client_connection_async.py | 2 +- .../azure/cosmos/aio/_global_endpoint_manager_async.py | 2 +- .../azure-cosmos/azure/cosmos/aio/_query_iterable_async.py | 2 +- sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility.py | 2 +- .../azure-cosmos/azure/cosmos/aio/_retry_utility_async.py | 2 +- sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py | 2 +- sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py | 2 +- sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py | 4 ++-- sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py | 2 +- sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py | 2 +- 20 files changed, 21 insertions(+), 21 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/__init__.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/__init__.py index f5373937e446..7857ba17d6e6 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/__init__.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/__init__.py @@ -1,5 +1,5 @@ # The MIT License (MIT) -# Copyright (c) 2014 Microsoft Corporation +# Copyright (c) 2021 Microsoft Corporation # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/base_execution_context.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/base_execution_context.py index d097ebd5f8b0..3764cc7febc7 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/base_execution_context.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/base_execution_context.py @@ -1,5 +1,5 @@ # The MIT License (MIT) -# Copyright (c) 2014 Microsoft Corporation +# Copyright (c) 2021 Microsoft Corporation # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/document_producer.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/document_producer.py index 4b3c060ee19d..f024a6913a12 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/document_producer.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/document_producer.py @@ -1,5 +1,5 @@ # The MIT License (MIT) -# Copyright (c) 2014 Microsoft Corporation +# Copyright (c) 2021 Microsoft Corporation # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software 
and associated documentation files (the "Software"), to deal diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/endpoint_component.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/endpoint_component.py index fa0df19174c9..b0cd7242fa62 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/endpoint_component.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/endpoint_component.py @@ -1,5 +1,5 @@ # The MIT License (MIT) -# Copyright (c) 2014 Microsoft Corporation +# Copyright (c) 2021 Microsoft Corporation # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/execution_dispatcher.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/execution_dispatcher.py index 5dcff70d93a5..53774dcbd91c 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/execution_dispatcher.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/execution_dispatcher.py @@ -1,5 +1,5 @@ # The MIT License (MIT) -# Copyright (c) 2014 Microsoft Corporation +# Copyright (c) 2021 Microsoft Corporation # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/multi_execution_aggregator.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/multi_execution_aggregator.py index 25d4285c5e0f..1758e825ae18 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/multi_execution_aggregator.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/multi_execution_aggregator.py @@ -1,5 +1,5 @@ # The MIT License (MIT) -# Copyright (c) 2014 Microsoft Corporation +# Copyright (c) 2021 Microsoft Corporation # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/aio/__init__.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/aio/__init__.py index f5373937e446..7857ba17d6e6 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/aio/__init__.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/aio/__init__.py @@ -1,5 +1,5 @@ # The MIT License (MIT) -# Copyright (c) 2014 Microsoft Corporation +# Copyright (c) 2021 Microsoft Corporation # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/aio/routing_map_provider.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/aio/routing_map_provider.py index 77fc8ed2f2ad..b3dae3ccb515 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/aio/routing_map_provider.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/aio/routing_map_provider.py @@ -1,5 +1,5 @@ # The MIT License (MIT) -# Copyright (c) 2014 Microsoft Corporation +# Copyright (c) 2021 Microsoft Corporation # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/__init__.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/__init__.py index ca9d2e221831..1c43976e030c 
100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/__init__.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/__init__.py @@ -1,5 +1,5 @@ # The MIT License (MIT) -# Copyright (c) 2014 Microsoft Corporation +# Copyright (c) 2021 Microsoft Corporation # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py index fe292174dbe7..83dde1c205dd 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py @@ -1,5 +1,5 @@ # The MIT License (MIT) -# Copyright (c) 2014 Microsoft Corporation +# Copyright (c) 2021 Microsoft Corporation # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py index 85ae94186dd7..97a880638ca5 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py @@ -1,5 +1,5 @@ # The MIT License (MIT) -# Copyright (c) 2014 Microsoft Corporation +# Copyright (c) 2021 Microsoft Corporation # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py index 78070648cf16..0b162e40f07c 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py @@ -1,5 +1,5 @@ # The MIT License (MIT) -# Copyright (c) 2014 Microsoft Corporation +# Copyright (c) 2021 Microsoft Corporation # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_query_iterable_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_query_iterable_async.py index c554bbf1bfa9..b3a8bafb32b9 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_query_iterable_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_query_iterable_async.py @@ -1,5 +1,5 @@ # The MIT License (MIT) -# Copyright (c) 2014 Microsoft Corporation +# Copyright (c) 2021 Microsoft Corporation # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility.py index 39e2bd1264e3..b84a7f302dcd 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility.py @@ -1,5 +1,5 @@ # The MIT License (MIT) -# Copyright (c) 2014 Microsoft Corporation +# Copyright (c) 2021 Microsoft Corporation # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal diff 
--git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility_async.py index cb926977844d..156728a5f56a 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility_async.py @@ -1,5 +1,5 @@ # The MIT License (MIT) -# Copyright (c) 2014 Microsoft Corporation +# Copyright (c) 2021 Microsoft Corporation # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py index 14d66ba7e69d..41c701a15c54 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py @@ -1,5 +1,5 @@ # The MIT License (MIT) -# Copyright (c) 2014 Microsoft Corporation +# Copyright (c) 2021 Microsoft Corporation # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py index e2c94513b4d8..282be6101b5c 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py @@ -1,5 +1,5 @@ # The MIT License (MIT) -# Copyright (c) 2014 Microsoft Corporation +# Copyright (c) 2021 Microsoft Corporation # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py index 4e26f19ab4a5..bae1339cfe7f 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py @@ -1,5 +1,5 @@ # The MIT License (MIT) -# Copyright (c) 2014 Microsoft Corporation +# Copyright (c) 2021 Microsoft Corporation # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal @@ -308,6 +308,7 @@ def get_container_client(self, container): :param container: The ID (name) of the container, a :class:`ContainerProxy` instance, or a dict representing the properties of the container to be retrieved. + :returns: A `ContainerProxy` instance representing the container. :rtype: ~azure.cosmos.ContainerProxy .. admonition:: Example: @@ -560,7 +561,6 @@ def get_user_client(self, user): :param user: The ID (name), dict representing the properties or :class:`UserProxy` instance of the user to be retrieved. :returns: A `UserProxy` instance representing the retrieved user. - :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given user couldn't be retrieved. 
:rtype: ~azure.cosmos.UserProxy """ if isinstance(user, UserProxy): diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py index 4e52bce701d0..1cac79681370 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py @@ -1,5 +1,5 @@ # The MIT License (MIT) -# Copyright (c) 2014 Microsoft Corporation +# Copyright (c) 2021 Microsoft Corporation # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py index ec9463f2d0f2..a33a32019d25 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py @@ -1,5 +1,5 @@ # The MIT License (MIT) -# Copyright (c) 2014 Microsoft Corporation +# Copyright (c) 2021 Microsoft Corporation # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal From 74da690e933baa35f46c80c9d6e72835618ceaac Mon Sep 17 00:00:00 2001 From: simorenoh Date: Fri, 22 Oct 2021 16:25:04 -0400 Subject: [PATCH 27/56] Update CHANGELOG.md --- sdk/cosmos/azure-cosmos/CHANGELOG.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sdk/cosmos/azure-cosmos/CHANGELOG.md b/sdk/cosmos/azure-cosmos/CHANGELOG.md index e3770f312a83..85eda239a2f0 100644 --- a/sdk/cosmos/azure-cosmos/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos/CHANGELOG.md @@ -1,5 +1,6 @@ ## 4.2.1 (Unreleased) - +**New features** +- Added language native async i/o client ## 4.2.0 (2020-10-08) From 20718c734b1a0fb1e05e5c50db984c602890dc23 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Mon, 25 Oct 2021 10:22:50 -0400 Subject: [PATCH 28/56] Delete simon_testfile.py --- .../azure-cosmos/samples/simon_testfile.py | 320 ------------------ 1 file changed, 320 deletions(-) delete mode 100644 sdk/cosmos/azure-cosmos/samples/simon_testfile.py diff --git a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py b/sdk/cosmos/azure-cosmos/samples/simon_testfile.py deleted file mode 100644 index 30121971712c..000000000000 --- a/sdk/cosmos/azure-cosmos/samples/simon_testfile.py +++ /dev/null @@ -1,320 +0,0 @@ -import sys -from time import time -sys.path.append(r"C:\Users\simonmoreno\Repos\azure-sdk-for-python\sdk\cosmos\azure-cosmos") - -import asyncio -import time -from azure.cosmos.aio.cosmos_client import CosmosClient as AsyncClient -from azure.cosmos.cosmos_client import CosmosClient as SyncClient -import azure.cosmos.exceptions as exceptions -from azure.cosmos.partition_key import PartitionKey - -endpoint = '' -key = '' - -import uuid - -def get_test_item(): - async_item = { - 'id': 'Async_' + str(uuid.uuid4()), - 'address': { - 'state': 'WA', - 'city': 'Redmond', - 'street': '1 Microsoft Way' - }, - 'test_object': True, - 'lastName': 'Smith' - } - return async_item - -def partition_split_test(): - client = SyncClient(endpoint, key) - db = client.create_database_if_not_exists("pker2") - container = db.create_container_if_not_exists(id="pkerc2", partition_key=PartitionKey(path="/id")) - # db = client.get_database_client("pktest") - # container = db.get_container_client("pktestcol") - for i in range(100): - body = get_test_item() - container.create_item(body=body) - query = "SELECT * FROM c" - success, errors = 0, 0 - current_pkid = 
client.client_connection.last_response_headers["x-ms-documentdb-partitionkeyrangeid"] - print("created items, waiting 10s, check current partition is {}".format(current_pkid)) - time.sleep(10) - for i in range(10000): - try: - x = container.query_items(query=query, enable_cross_partition_query=True) - print("Success, count: {}".format(len(list(x)))) - success += 1 - print("Successes: {}, Errors: {}".format(success, errors)) - if client.client_connection.last_response_headers["x-ms-documentdb-partitionkeyrangeid"] != current_pkid: - current_pkid = client.client_connection.last_response_headers["x-ms-documentdb-partitionkeyrangeid"] - print("PARTITION KEY RANGE ID WAS UPDATED TO {}".format(current_pkid)) - time.sleep(1) - time.sleep(1) - #Use breakpoint to stop execution, change provisioned RUs on container, wait for x-ms-offer-replace-pending header, then continue - #Increase to >10k RUs causes partition split (15k to be safe) - except Exception as e: - print(e.message) - print(e) - errors +=1 - print("Successes: {}, Errors: {}".format(success, errors)) - - #create 100 items, for i in 100000, query and sleeping, catch exception, after 10k/11k RU it will split partition - #query Select * from c - #sleep(1s) - #catch exception - -async def asynccccc(): - client = AsyncClient(endpoint, key) - db = await client.create_database_if_not_exists("ppppp") - async for x in client.list_databases(): - print("op") - print(x) - cont = await db.create_container_if_not_exists(id="pppppppp", partition_key=PartitionKey(path="/id")) - x = await cont.read() - print(x) - await client.delete_database("ppppp") - await client.close() - - -async def async_crud_test(): - db_name = "crudAsync" - cont_name = "cont" - ttl = 200 - async with AsyncClient(endpoint, key) as client: - db = await client.create_database(db_name) - print("Created DB, now reading and attempting create_if_not_exist") - - await db.read() - db = await client.create_database_if_not_exists(db_name) - print("Create if not exist had no problems, deleting DB now") - - await client.delete_database(db_name) - print("DB deleted, now attempting read") - try: - await db.read() - except: - print("Error returned successfully for reading DB") - - print("Re-creating DB for testing container methods") - db = await client.create_database_if_not_exists(db_name) - cont = await db.create_container(id=cont_name, partition_key=PartitionKey(path="/lastName")) - print("Created container, now reading and attempting create_if_not_exists") - - c = await cont.read() - cont = await db.create_container_if_not_exists(id=cont_name, partition_key=PartitionKey(path="/lastName")) - print("Create if not exist had no problems, replacing and deleting container now") - - assert c.get('defaultTtl') is None - await db.replace_container(container=cont_name, partition_key=PartitionKey(path='/lastName'), default_ttl=ttl) - c = await cont.read() - assert c.get('defaultTtl') == 200 - print("Container properties changed, now deleting") - - await db.delete_container(cont_name) - print("Container deleted, now attempting read") - try: - await cont.read() - except: - print("Error returned succesfully") - - print("Re-creating container for testing item methods") - cont = await db.create_container_if_not_exists(id=cont_name, partition_key=PartitionKey(path="/lastName")) - - body1 = get_test_item() - await cont.create_item(body=body1) - print("Created item, now reading and then upserting/replacing") - - body2 = get_test_item() - await cont.upsert_item(body=body1) - # Check here for read all items and 
verify there is still only 1 left after upsert - await cont.replace_item(item=body1["id"], body=body2) - print("Item replaced, now attempting read") - - try: - await cont.read_item(item=body1.get("id"), partition_key=body1.get("lastName")) - except: - print("Error returned succesfully, reading and deleting replaced item now") - - await cont.read_item(item=body2.get("id"), partition_key=body2.get("lastName")) - await cont.delete_item(item=body2.get("id"), partition_key=body2.get("lastName")) - print("Item deleted, now attempting read") - - try: - await cont.read_item(item=body2.get("id"), partition_key=body2.get("lastName")) - except: - print("Error returned succesfully, cleaning up account now") - await client.delete_database(db_name) - try: - await db.read() - except: - print("All cleaned up") - -def create_test(db_name, cont_name, num): - client = SyncClient(endpoint, key) - db = client.create_database(id=db_name) - container = db.create_container( - id=cont_name, - partition_key=PartitionKey(path="/id")) - ids = [] - for i in range(num): - body = get_test_item() - ids.append(body.get("id")) - container.create_item(body=body) - print("Created {} items in {} DB successfully".format(num, db_name)) - return ids - -def timed_sync_create(db_name, cont_name, num): - client = SyncClient(endpoint, key) - db = client.create_database(id=db_name) - container = db.create_container( - id=cont_name, - partition_key=PartitionKey(path="/id")) - ids = [] - start = time.time() - for i in range(num): - body = get_test_item() - ids.append(body.get("id")) - container.create_item(body=body) - print("Sync client created {} items in {} seconds".format(num, time.time() - start)) - return ids - -async def timed_async_create(db_name, cont_name, num): - async with AsyncClient(endpoint, key) as client: - db = await client.create_database_if_not_exists(id=db_name) - cont = await db.create_container_if_not_exists( - id=cont_name, - partition_key=PartitionKey(path="/id")) - ids = [] - start = time.time() - for i in range(num): - body = get_test_item() - ids.append(body.get("id")) - await cont.create_item(body=body) - print("Async client created {} items in {} seconds".format(num, time.time() - start)) - return ids - -def timed_sync_read(db2, cont2, num, ids): - client = SyncClient(endpoint, key) - db = client.get_database_client(db2) - cont = db.get_container_client(cont2) - start = time.time() - for id in ids: - x = cont.read_item(item=id, partition_key=id) - if not x: - print("Error retrieving item {}".format(id)) - print("Sync client retrieved {} items in {} seconds".format(num, time.time() - start)) - -async def timed_async_read(db1, cont1, num, ids): - async with AsyncClient(endpoint, key) as client: - db = client.get_database_client(db1) - cont = db.get_container_client(cont1) - start = time.time() - for id in ids: - x = await cont.read_item(item=id, partition_key=id) - if not x: - print("Error retrieving item {}".format(id)) - print("Async client retrieved {} items in {} seconds".format(num, time.time() - start)) - -async def read_tests(): - db = "db01" - cont = "c01" - num = 1000 - ids = create_test(db, cont, num) - timed_sync_read(db,cont,num,ids) - await timed_async_read(db,cont,num,ids) - -async def create_tests(): - db1, db2 = "db01", "db02" - cont1, cont2 = "c01", "c02" - num = 10 - ids1 = timed_sync_create(db1,cont1,num) - ids2 = await timed_async_create(db2,cont2,num) - print(len(ids1) == len(ids2)) - -def user_testsss(): - client = SyncClient(endpoint, key) - db = client.get_database_client("xusud") - u 
= db.get_user_client(user="testid") - data = u.read() - print(data) - perms = u.list_permissions() - print(list(perms)) - -async def qta(): - async with AsyncClient(endpoint, key) as client: - db = await client.create_database_if_not_exists("qta") - cont = await db.create_container_if_not_exists(id="qtac", partition_key=PartitionKey(path="/id")) - itemId = "Async_e402afa6-badf-43f2-8ddd-83776221cb3a" - print("attempting query") - - query = "SELECT * FROM c" - items = await cont.query_items( - query=query, - parameters=[{"name":"@id", "value": itemId}], - enable_cross_partition_query=True) - - print(items) - print(items is None) - async for item in items: - if not item: - print("NO ITEMS") - else: - print(item) - #or - list_of_items = [c async for c in items] - - - - # x = cont.read_all_items() - # #async for item in items - # # - # async for item in x: - # print(item) - - # y = await cont.read_offer() - # print(type(y)) - # print(y) - # print(y.properties) - # print(y.offer_throughput) - - # print("replacing") - # x = await cont.replace_throughput(throughput=400) - # print(type(x)) - # print(x.properties) - # print(x.offer_throughput) - - # z = cont.list_conflicts() - # print(type(z)) - # print(z) - -def qt(): - client = SyncClient(endpoint, key) - db = client.create_database_if_not_exists(id="qt") - container = db.create_container_if_not_exists( - id="qtc", - partition_key=PartitionKey(path="/id")) - - x = db.get_container_client("nice") - print(x) - -# async def read_all(): -# async with AsyncClient(endpoint, key) as client: -# db = await client.create_database_if_not_exists("readall") -# cont = await db.create_container_if_not_exists("cont", PartitionKey(path='/lastName')) -# for i in range(5): -# await cont.create_item(body=get_test_item()) -# c = await cont.read_all_items() -# print(await c.__anext__()) -# print(type(c)) - -async def main(): - # await read_tests() - # await async_crud_test() - await asynccccc() - - -if __name__ == "__main__": - loop = asyncio.get_event_loop() - loop.run_until_complete(main()) \ No newline at end of file From e3c27a54f20c48a759e34736d29128bae910c2bf Mon Sep 17 00:00:00 2001 From: simorenoh Date: Mon, 25 Oct 2021 10:30:29 -0400 Subject: [PATCH 29/56] leftover retry utility --- .../azure/cosmos/aio/_retry_utility.py | 196 ------------------ .../azure/cosmos/cosmos_client.py | 3 - 2 files changed, 199 deletions(-) delete mode 100644 sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility.py diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility.py deleted file mode 100644 index b84a7f302dcd..000000000000 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility.py +++ /dev/null @@ -1,196 +0,0 @@ -# The MIT License (MIT) -# Copyright (c) 2021 Microsoft Corporation - -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: - -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. 
- -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -"""Internal methods for executing functions in the Azure Cosmos database service. -""" - -import time -import asyncio - -from azure.core.exceptions import AzureError, ClientAuthenticationError -from azure.core.pipeline.policies import AsyncRetryPolicy - -from .. import exceptions -from ..http_constants import HttpHeaders, StatusCodes, SubStatusCodes -from .._retry_utility import _configure_timeout -from .. import _endpoint_discovery_retry_policy -from .. import _resource_throttle_retry_policy -from .. import _default_retry_policy -from .. import _session_retry_policy - - -# pylint: disable=protected-access - - -async def ExecuteAsync(client, global_endpoint_manager, function, *args, **kwargs): - """Executes the function with passed parameters applying all retry policies - - :param object client: - Document client instance - :param object global_endpoint_manager: - Instance of _GlobalEndpointManager class - :param function function: - Function to be called wrapped with retries - :param (non-keyworded, variable number of arguments list) *args: - :param (keyworded, variable number of arguments list) **kwargs: - - """ - # instantiate all retry policies here to be applied for each request execution - endpointDiscovery_retry_policy = _endpoint_discovery_retry_policy.EndpointDiscoveryRetryPolicy( - client.connection_policy, global_endpoint_manager, *args - ) - - resourceThrottle_retry_policy = _resource_throttle_retry_policy.ResourceThrottleRetryPolicy( - client.connection_policy.RetryOptions.MaxRetryAttemptCount, - client.connection_policy.RetryOptions.FixedRetryIntervalInMilliseconds, - client.connection_policy.RetryOptions.MaxWaitTimeInSeconds, - ) - defaultRetry_policy = _default_retry_policy.DefaultRetryPolicy(*args) - - sessionRetry_policy = _session_retry_policy._SessionRetryPolicy( - client.connection_policy.EnableEndpointDiscovery, global_endpoint_manager, *args - ) - while True: - try: - client_timeout = kwargs.get('timeout') - start_time = time.time() - if args: - result = await ExecuteFunctionAsync(function, global_endpoint_manager, *args, **kwargs) - else: - result = await ExecuteFunctionAsync(function, *args, **kwargs) - if not client.last_response_headers: - client.last_response_headers = {} - - # setting the throttle related response headers before returning the result - client.last_response_headers[ - HttpHeaders.ThrottleRetryCount - ] = resourceThrottle_retry_policy.current_retry_attempt_count - client.last_response_headers[ - HttpHeaders.ThrottleRetryWaitTimeInMs - ] = resourceThrottle_retry_policy.cummulative_wait_time_in_milliseconds - - return result - except exceptions.CosmosHttpResponseError as e: - retry_policy = None - if e.status_code == StatusCodes.FORBIDDEN and e.sub_status == SubStatusCodes.WRITE_FORBIDDEN: - retry_policy = endpointDiscovery_retry_policy - elif e.status_code == StatusCodes.TOO_MANY_REQUESTS: - retry_policy = resourceThrottle_retry_policy - elif ( - e.status_code == StatusCodes.NOT_FOUND - and e.sub_status - and e.sub_status == SubStatusCodes.READ_SESSION_NOTAVAILABLE - ): 
- retry_policy = sessionRetry_policy - else: - retry_policy = defaultRetry_policy - - # If none of the retry policies applies or there is no retry needed, set the - # throttle related response hedaers and re-throw the exception back arg[0] - # is the request. It needs to be modified for write forbidden exception - if not retry_policy.ShouldRetry(e): - if not client.last_response_headers: - client.last_response_headers = {} - client.last_response_headers[ - HttpHeaders.ThrottleRetryCount - ] = resourceThrottle_retry_policy.current_retry_attempt_count - client.last_response_headers[ - HttpHeaders.ThrottleRetryWaitTimeInMs - ] = resourceThrottle_retry_policy.cummulative_wait_time_in_milliseconds - if args and args[0].should_clear_session_token_on_session_read_failure: - client.session.clear_session_token(client.last_response_headers) - raise - - # Wait for retry_after_in_milliseconds time before the next retry - await asyncio.sleep(retry_policy.retry_after_in_milliseconds / 1000.0) - if client_timeout: - kwargs['timeout'] = client_timeout - (time.time() - start_time) - if kwargs['timeout'] <= 0: - raise exceptions.CosmosClientTimeoutError() - - -async def ExecuteFunctionAsync(function, *args, **kwargs): - """Stub method so that it can be used for mocking purposes as well. - """ - return await function(*args, **kwargs) - - -class ConnectionRetryPolicy(AsyncRetryPolicy): - - def __init__(self, **kwargs): - clean_kwargs = {k: v for k, v in kwargs.items() if v is not None} - super(ConnectionRetryPolicy, self).__init__(**clean_kwargs) - - async def send(self, request): - """Sends the PipelineRequest object to the next policy. Uses retry settings if necessary. - Also enforces an absolute client-side timeout that spans multiple retry attempts. - - :param request: The PipelineRequest object - :type request: ~azure.core.pipeline.PipelineRequest - :return: Returns the PipelineResponse or raises error if maximum retries exceeded. - :rtype: ~azure.core.pipeline.PipelineResponse - :raises ~azure.core.exceptions.AzureError: Maximum retries exceeded. - :raises ~azure.cosmos.exceptions.CosmosClientTimeoutError: Specified timeout exceeded. - :raises ~azure.core.exceptions.ClientAuthenticationError: Authentication failed. 
- """ - absolute_timeout = request.context.options.pop('timeout', None) - per_request_timeout = request.context.options.pop('connection_timeout', 0) - - retry_error = None - retry_active = True - response = None - retry_settings = self.configure_retries(request.context.options) - while retry_active: - try: - start_time = time.time() - _configure_timeout(request, absolute_timeout, per_request_timeout) - - response = await self.next.send(request) - if self.is_retry(retry_settings, response): - retry_active = self.increment(retry_settings, response=response) - if retry_active: - await self.sleep(retry_settings, request.context.transport, response=response) - continue - break - except ClientAuthenticationError: # pylint:disable=try-except-raise - # the authentication policy failed such that the client's request can't - # succeed--we'll never have a response to it, so propagate the exception - raise - except exceptions.CosmosClientTimeoutError as timeout_error: - timeout_error.inner_exception = retry_error - timeout_error.response = response - timeout_error.history = retry_settings['history'] - raise - except AzureError as err: - retry_error = err - if self._is_method_retryable(retry_settings, request.http_request): - retry_active = self.increment(retry_settings, response=request, error=err) - if retry_active: - await self.sleep(retry_settings, request.context.transport) - continue - raise err - finally: - end_time = time.time() - if absolute_timeout: - absolute_timeout -= (end_time - start_time) - - self.update_context(response.context, retry_settings) - return response diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py index 2954a3578faf..4a2e6cdcbc50 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py @@ -179,9 +179,6 @@ def __enter__(self): def __exit__(self, *args): return self.client_connection.pipeline_client.__exit__(*args) - def close(self): - self.__exit__() - @classmethod def from_connection_string(cls, conn_str, credential=None, consistency_level="Session", **kwargs): # type: (str, Optional[Any], str, Any) -> CosmosClient From 3b778ad7c5f73ad61d3b27b328b1862d90b39eea Mon Sep 17 00:00:00 2001 From: simorenoh Date: Mon, 25 Oct 2021 16:09:45 -0400 Subject: [PATCH 30/56] Update README.md --- sdk/cosmos/azure-cosmos/README.md | 79 +++++++++++++++++++++++++++++++ 1 file changed, 79 insertions(+) diff --git a/sdk/cosmos/azure-cosmos/README.md b/sdk/cosmos/azure-cosmos/README.md index 4d2682f6e689..a60d8c00a6c7 100644 --- a/sdk/cosmos/azure-cosmos/README.md +++ b/sdk/cosmos/azure-cosmos/README.md @@ -176,6 +176,7 @@ The following sections provide several code snippets covering some of the most c * [Get database properties](#get-database-properties "Get database properties") * [Get database and container throughputs](#get-database-and-container-throughputs "Get database and container throughputs") * [Modify container properties](#modify-container-properties "Modify container properties") +* [Using the asynchronous client](#using-the-asynchronous-client "Using the asynchronous client") ### Create a database @@ -427,6 +428,84 @@ print(json.dumps(container_props['defaultTtl'])) For more information on TTL, see [Time to Live for Azure Cosmos DB data][cosmos_ttl]. 
+### Using the asynchronous client
+
+The asynchronous Cosmos client looks and works much like the existing synchronous client, except that it lives in the `azure.cosmos.aio` package of the SDK and its operations must be awaited with the async/await keywords.
+
+```Python
+from azure.cosmos.aio import CosmosClient
+import os
+
+url = os.environ['ACCOUNT_URI']
+key = os.environ['ACCOUNT_KEY']
+client = CosmosClient(url, credential=key)
+database_name = 'testDatabase'
+database = client.get_database_client(database_name)
+container_name = 'products'
+container = database.get_container_client(container_name)
+
+async def create_items():
+    for i in range(1, 10):
+        await container.upsert_item({
+                'id': 'item{0}'.format(i),
+                'productName': 'Widget',
+                'productModel': 'Model {0}'.format(i)
+            }
+        )
+    await client.close()
+```
+
+Note that the asynchronous client has to be closed after use: either call the close() method directly, as shown above, or initialize the client with an `async with` statement inside a coroutine so that it is closed automatically, as shown below.
+
+```Python
+from azure.cosmos.aio import CosmosClient
+import os
+
+url = os.environ['ACCOUNT_URI']
+key = os.environ['ACCOUNT_KEY']
+database_name = 'testDatabase'
+container_name = 'products'
+
+async def create_items():
+    async with CosmosClient(url, credential=key) as client:
+        database = client.get_database_client(database_name)
+        container = database.get_container_client(container_name)
+        for i in range(1, 10):
+            await container.upsert_item({
+                    'id': 'item{0}'.format(i),
+                    'productName': 'Widget',
+                    'productModel': 'Model {0}'.format(i)
+                }
+            )
+```
+
+### Queries with the asynchronous client
+
+Queries work the same way for the most part, but because queries made by the asynchronous client return AsyncIterable objects, the results cannot be cast into lists directly. Iterate over them with `async for`, or use an asynchronous list comprehension if you need the results in a list:
+
+```Python
+from azure.cosmos.aio import CosmosClient
+import os
+
+url = os.environ['ACCOUNT_URI']
+key = os.environ['ACCOUNT_KEY']
+client = CosmosClient(url, credential=key)
+database_name = 'testDatabase'
+database = client.get_database_client(database_name)
+container_name = 'products'
+container = database.get_container_client(container_name)
+
+async def create_lists():
+    results = await container.query_items(
+        query='SELECT * FROM products p WHERE p.productModel = "Model 2"',
+        enable_cross_partition_query=True)
+
+    # Iterating directly on the results
+    async for item in results:
+        print(item)
+
+    # Making a list from the results
+    item_list = [item async for item in results]
+```

 ## Troubleshooting

From c6e352e6c13b2e7c6cc274736f2911eb062eb08d Mon Sep 17 00:00:00 2001
From: simorenoh
Date: Thu, 28 Oct 2021 09:57:06 -0400
Subject: [PATCH 31/56] docs and removed six package

---
 sdk/cosmos/azure-cosmos/README.md             |  4 +++
 .../aio/document_producer.py                  |  7 ++---
 .../aio/endpoint_component.py                 |  9 ++----
 .../azure/cosmos/aio/_asynchronous_request.py |  7 ++---
 .../aio/_cosmos_client_connection_async.py    | 12 ++++----
 .../aio/_global_endpoint_manager_async.py     | 29 +++++++++++++++++--
 .../azure/cosmos/aio/container.py             |  7 ++---
 .../azure/cosmos/aio/cosmos_client.py         |  5 ++--
 .../azure-cosmos/azure/cosmos/aio/database.py |  7 ++---
 .../azure-cosmos/azure/cosmos/aio/scripts.py  |  6 ++--
 .../azure-cosmos/azure/cosmos/aio/user.py     |  6 ++--
 11 files changed, 57 insertions(+), 42 deletions(-)

diff --git a/sdk/cosmos/azure-cosmos/README.md b/sdk/cosmos/azure-cosmos/README.md index 
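The coroutines defined in the asynchronous client section above still need an event loop to run. A minimal sketch of driving one of them from a script (assuming the `create_items` coroutine shown earlier), using `asyncio.run`; the samples in this repository use `loop.run_until_complete`, which works just as well:

```Python
import asyncio

# Minimal sketch: execute one of the coroutines from the snippets above.
if __name__ == "__main__":
    asyncio.run(create_items())
```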
a60d8c00a6c7..45b0e3c2fd66 100644 --- a/sdk/cosmos/azure-cosmos/README.md +++ b/sdk/cosmos/azure-cosmos/README.md @@ -90,6 +90,10 @@ For more information about these resources, see [Working with Azure Cosmos datab The keyword-argument `enable_cross_partition_query` accepts 2 options: `None` (default) or `True`. +## Note on using queries by id + +When using queries that try to find items based on an **id** value, always make sure you are passing in a string type variable. Azure Cosmos DB only allows string id values and if you use any other datatype, this SDK will return no results and no error messages. + ## Limitations Currently the features below are **not supported**. For alternatives options, check the **Workarounds** section below. diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/document_producer.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/document_producer.py index f024a6913a12..ad3ecb2f41db 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/document_producer.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/document_producer.py @@ -25,8 +25,7 @@ import numbers from collections import deque - -import six +from typing import AnyStr from azure.cosmos import _base from azure.cosmos._execution_context.aio.base_execution_context import _DefaultQueryExecutionContext @@ -153,7 +152,7 @@ def getTypeOrd(orderby_item): return 2 if isinstance(val, numbers.Number): return 4 - if isinstance(val, six.string_types): + if isinstance(val, AnyStr): return 5 raise TypeError("unknown type" + str(val)) @@ -175,7 +174,7 @@ def getTypeStr(orderby_item): return "Boolean" if isinstance(val, numbers.Number): return "Number" - if isinstance(val, six.string_types): + if isinstance(val, AnyStr): return "String" raise TypeError("unknown type" + str(val)) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/endpoint_component.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/endpoint_component.py index b0cd7242fa62..d17916267bae 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/endpoint_component.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/endpoint_component.py @@ -26,7 +26,6 @@ import copy import hashlib import json -import six from azure.cosmos._execution_context.aggregators import ( _AverageAggregator, @@ -119,18 +118,14 @@ def make_hash(self, value): async def __anext__(self): res = await self._execution_context.__anext__() - json_repr = json.dumps(self.make_hash(res)) - if six.PY3: - json_repr = json_repr.encode("utf-8") + json_repr = json.dumps(self.make_hash(res)).encode("utf-8") hash_object = hashlib.sha1(json_repr) # nosec hashed_result = hash_object.hexdigest() while hashed_result in self.last_result: res = await self._execution_context.__anext__() - json_repr = json.dumps(self.make_hash(res)) - if six.PY3: - json_repr = json_repr.encode("utf-8") + json_repr = json.dumps(self.make_hash(res)).encode("utf-8") hash_object = hashlib.sha1(json_repr) # nosec hashed_result = hash_object.hexdigest() diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py index 83dde1c205dd..fbeaee482e90 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py @@ -26,7 +26,7 @@ import time from six.moves.urllib.parse import urlparse -import six +from typing import AnyStr from 
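To make the README's new "Note on using queries by id" concrete: id values in Azure Cosmos DB are strings, so a query parameter compared against `id` must also be a string. A minimal sketch with the synchronous client (query text and values are illustrative):

```Python
# Minimal sketch for the "queries by id" note; names and values are illustrative.
# Passing a number silently matches nothing, while the string form finds the item.
no_results = container.query_items(
    query="SELECT * FROM c WHERE c.id = @id",
    parameters=[{"name": "@id", "value": 1}],    # wrong: non-string id, returns nothing
    enable_cross_partition_query=True)

matches = container.query_items(
    query="SELECT * FROM c WHERE c.id = @id",
    parameters=[{"name": "@id", "value": "1"}],  # correct: string id
    enable_cross_partition_query=True)
```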
azure.core.exceptions import DecodeError # type: ignore from .. import exceptions @@ -111,8 +111,7 @@ async def _Request(global_endpoint_manager, request_params, connection_policy, p headers = dict(response.headers) data = response.body() - if data and not six.PY2: - # python 3 compatible: convert data from byte to unicode string + if data: data = data.decode("utf-8") if response.status_code == 404: @@ -168,7 +167,7 @@ async def AsynchronousRequest( :rtype: tuple of (dict dict) """ request.data = _request_body_from_data(request_data) - if request.data and isinstance(request.data, six.string_types): + if request.data and isinstance(request.data, AnyStr): request.headers[http_constants.HttpHeaders.ContentLength] = len(request.data) elif request.data is None: request.headers[http_constants.HttpHeaders.ContentLength] = 0 diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py index 97a880638ca5..98ed4db8a3f7 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py @@ -26,8 +26,8 @@ """ # https://github.com/PyCQA/pylint/issues/3112 # Currently pylint is locked to 2.3.3 and this is fixed in 2.4.4 -from typing import Dict, Any, Optional # pylint: disable=unused-import -import six +from typing import AnyStr, Dict, Any, Optional # pylint: disable=unused-import +from six.moves.urllib.parse import urlparse from urllib3.util.retry import Retry from azure.core.async_paging import AsyncItemPaged from azure.core import AsyncPipelineClient @@ -176,7 +176,7 @@ def __init__( proxies = kwargs.pop('proxies', {}) if self.connection_policy.ProxyConfiguration and self.connection_policy.ProxyConfiguration.Host: host = self.connection_policy.ProxyConfiguration.Host - url = six.moves.urllib.parse.urlparse(host) + url = urlparse(host) proxy = host if url.port else host + ":" + str(self.connection_policy.ProxyConfiguration.Port) proxies.update({url.scheme : proxy}) @@ -2267,15 +2267,15 @@ def __CheckAndUnifyQueryFormat(self, query_body): self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.Default or self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.Query ): - if not isinstance(query_body, dict) and not isinstance(query_body, six.string_types): + if not isinstance(query_body, dict) and not isinstance(query_body, AnyStr): raise TypeError("query body must be a dict or string.") if isinstance(query_body, dict) and not query_body.get("query"): raise ValueError('query body must have valid query text with key "query".') - if isinstance(query_body, six.string_types): + if isinstance(query_body, AnyStr): return {"query": query_body} elif ( self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.SqlQuery - and not isinstance(query_body, six.string_types) + and not isinstance(query_body, AnyStr) ): raise TypeError("query body must be a string.") else: diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py index 0b162e40f07c..b1b9132064fd 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py @@ -25,7 +25,6 @@ import asyncio from six.moves.urllib.parse import urlparse - from .. 
import _constants as constants from .. import exceptions from .._location_cache import LocationCache @@ -140,4 +139,30 @@ async def _GetDatabaseAccountStub(self, endpoint, **kwargs): This can be used for mocking purposes as well. """ - return await self.Client.GetDatabaseAccount(endpoint, **kwargs) \ No newline at end of file + return await self.Client.GetDatabaseAccount(endpoint, **kwargs) + + @staticmethod + def GetLocationalEndpoint(default_endpoint, location_name): + # For default_endpoint like 'https://contoso.documents.azure.com:443/' parse it to + # generate URL format. This default_endpoint should be global endpoint(and cannot + # be a locational endpoint) and we agreed to document that + endpoint_url = urlparse(default_endpoint) + + # hostname attribute in endpoint_url will return 'contoso.documents.azure.com' + if endpoint_url.hostname is not None: + hostname_parts = str(endpoint_url.hostname).lower().split(".") + if hostname_parts is not None: + # global_database_account_name will return 'contoso' + global_database_account_name = hostname_parts[0] + + # Prepare the locational_database_account_name as contoso-EastUS for location_name 'East US' + locational_database_account_name = global_database_account_name + "-" + location_name.replace(" ", "") + + # Replace 'contoso' with 'contoso-EastUS' and return locational_endpoint + # as https://contoso-EastUS.documents.azure.com:443/ + locational_endpoint = default_endpoint.lower().replace( + global_database_account_name, locational_database_account_name, 1 + ) + return locational_endpoint + + return None \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py index 41c701a15c54..9da05775e7cd 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py @@ -22,9 +22,8 @@ """Create, read, update and delete items in the Azure Cosmos DB SQL API service. """ -from typing import Any, Dict, List, Optional, Union, Iterable, cast +from typing import Any, AnyStr, Dict, List, Optional, Union, Iterable, cast -import six from azure.core.tracing.decorator import distributed_trace # pylint: disable=unused-import from azure.core.tracing.decorator_async import distributed_trace_async # type: ignore from typing import Any, Dict, List, Optional, Union, Iterable, cast # pylint: disable=unused-import @@ -95,13 +94,13 @@ def scripts(self): def _get_document_link(self, item_or_link): # type: (Union[Dict[str, Any], str]) -> str - if isinstance(item_or_link, six.string_types): + if isinstance(item_or_link, AnyStr): return u"{}/docs/{}".format(self.container_link, item_or_link) return item_or_link["_self"] def _get_conflict_link(self, conflict_or_link): # type: (Union[Dict[str, Any], str]) -> str - if isinstance(conflict_or_link, six.string_types): + if isinstance(conflict_or_link, AnyStr): return u"{}/conflicts/{}".format(self.container_link, conflict_or_link) return conflict_or_link["_self"] diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py index 282be6101b5c..57f4a8c76219 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py @@ -22,9 +22,8 @@ """Create, read, and delete databases in the Azure Cosmos DB SQL API service. 
""" -from typing import Any, Dict, Optional, Union, cast, Iterable, List +from typing import Any, AnyStr, Dict, Optional, Union, cast, Iterable, List -import six from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.tracing.decorator import distributed_trace @@ -164,7 +163,7 @@ def from_connection_string(cls, conn_str, credential=None, consistency_level="Se @staticmethod def _get_database_link(database_or_id): # type: (Union[DatabaseProxy, str, Dict[str, str]]) -> str - if isinstance(database_or_id, six.string_types): + if isinstance(database_or_id, AnyStr): return "dbs/{}".format(database_or_id) try: return cast("DatabaseProxy", database_or_id).database_link diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py index bae1339cfe7f..e0b0bfce4d70 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py @@ -22,10 +22,9 @@ """Interact with databases in the Azure Cosmos DB SQL API service. """ -from typing import Any, List, Dict, Union, cast, Iterable, Optional +from typing import Any, AnyStr, List, Dict, Union, cast, Iterable, Optional import warnings -import six from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.tracing.decorator import distributed_trace @@ -87,7 +86,7 @@ def __repr__(self): @staticmethod def _get_container_id(container_or_id): # type: (Union[str, ContainerProxy, Dict[str, Any]]) -> str - if isinstance(container_or_id, six.string_types): + if isinstance(container_or_id, AnyStr): return container_or_id try: return cast("ContainerProxy", container_or_id).id @@ -101,7 +100,7 @@ def _get_container_link(self, container_or_id): def _get_user_link(self, user_or_id): # type: (Union[UserProxy, str, Dict[str, Any]]) -> str - if isinstance(user_or_id, six.string_types): + if isinstance(user_or_id, AnyStr): return u"{}/users/{}".format(self.database_link, user_or_id) try: return cast("UserProxy", user_or_id).user_link diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py index 1cac79681370..6deec2eed3e6 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py @@ -22,9 +22,7 @@ """Create, read, update and delete and execute scripts in the Azure Cosmos DB SQL API service. """ -from typing import Any, List, Dict, Union, Iterable, Optional - -import six +from typing import Any, AnyStr, List, Dict, Union, Iterable, Optional from azure.cosmos.aio._cosmos_client_connection_async import CosmosClientConnection from .._base import build_options @@ -55,7 +53,7 @@ def __init__(self, client_connection, container_link, is_system_key): def _get_resource_link(self, script_or_id, typ): # type: (Union[Dict[str, Any], str], str) -> str - if isinstance(script_or_id, six.string_types): + if isinstance(script_or_id, AnyStr): return u"{}/{}/{}".format(self.container_link, typ, script_or_id) return script_or_id["_self"] diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py index a33a32019d25..6215473667ac 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py @@ -24,13 +24,11 @@ """Create, read, update and delete users in the Azure Cosmos DB SQL API service. 
""" -from typing import Any, List, Dict, Union, cast, Iterable, Optional +from typing import Any, AnyStr, List, Dict, Union, cast, Iterable, Optional -import six from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.tracing.decorator import distributed_trace - from ._cosmos_client_connection_async import CosmosClientConnection from .._base import build_options from ..permission import Permission @@ -56,7 +54,7 @@ def __repr__(self): def _get_permission_link(self, permission_or_id): # type: (Union[Permission, str, Dict[str, Any]]) -> str - if isinstance(permission_or_id, six.string_types): + if isinstance(permission_or_id, AnyStr): return u"{}/permissions/{}".format(self.user_link, permission_or_id) try: return cast("Permission", permission_or_id).permission_link From 52736acbe4182b0495261cfea1e2fecf1602d4e1 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Thu, 4 Nov 2021 14:03:04 -0400 Subject: [PATCH 32/56] changes based on comments still missing discussion resolution on SSL verification and tests for async functionality under test module (apart from samples which are basically end to end tests) --- sdk/cosmos/azure-cosmos/CHANGELOG.md | 2 +- .../aio/base_execution_context.py | 2 - .../aio/document_producer.py | 5 +- .../aio/execution_dispatcher.py | 3 +- .../azure/cosmos/aio/_asynchronous_request.py | 5 +- .../aio/_cosmos_client_connection_async.py | 10 +-- .../aio/_global_endpoint_manager_async.py | 6 +- .../azure/cosmos/aio/container.py | 89 +++++++------------ .../azure/cosmos/aio/cosmos_client.py | 61 +++---------- .../azure-cosmos/azure/cosmos/aio/database.py | 72 +++++---------- .../azure-cosmos/azure/cosmos/aio/scripts.py | 31 ++++--- .../azure-cosmos/azure/cosmos/aio/user.py | 18 ++-- ...npartitioned_container_operations_async.py | 11 +-- 13 files changed, 112 insertions(+), 203 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/CHANGELOG.md b/sdk/cosmos/azure-cosmos/CHANGELOG.md index 85eda239a2f0..ff4af76d7f08 100644 --- a/sdk/cosmos/azure-cosmos/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos/CHANGELOG.md @@ -1,4 +1,4 @@ -## 4.2.1 (Unreleased) +## 4.3.0 (Unreleased) **New features** - Added language native async i/o client diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/base_execution_context.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/base_execution_context.py index 3764cc7febc7..b8027a611cee 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/base_execution_context.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/base_execution_context.py @@ -142,8 +142,6 @@ async def callback(): return await _retry_utility_async.ExecuteAsync(self._client, self._client._global_endpoint_manager, callback) - next = __anext__ # Python 2 compatibility. 
- class _DefaultQueryExecutionContext(_QueryExecutionContextBase): """ diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/document_producer.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/document_producer.py index ad3ecb2f41db..695afddc7a26 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/document_producer.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/document_producer.py @@ -25,7 +25,6 @@ import numbers from collections import deque -from typing import AnyStr from azure.cosmos import _base from azure.cosmos._execution_context.aio.base_execution_context import _DefaultQueryExecutionContext @@ -152,7 +151,7 @@ def getTypeOrd(orderby_item): return 2 if isinstance(val, numbers.Number): return 4 - if isinstance(val, AnyStr): + if isinstance(val, str): return 5 raise TypeError("unknown type" + str(val)) @@ -174,7 +173,7 @@ def getTypeStr(orderby_item): return "Boolean" if isinstance(val, numbers.Number): return "Number" - if isinstance(val, AnyStr): + if isinstance(val, str): return "String" raise TypeError("unknown type" + str(val)) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/execution_dispatcher.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/execution_dispatcher.py index 53774dcbd91c..adc3393da5ac 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/execution_dispatcher.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/execution_dispatcher.py @@ -23,7 +23,6 @@ Cosmos database service. """ -from six.moves import xrange from azure.cosmos.exceptions import CosmosHttpResponseError from azure.cosmos._execution_context.execution_dispatcher import _is_partitioned_execution_info, _get_partitioned_execution_info from azure.cosmos._execution_context.aio import multi_execution_aggregator @@ -183,7 +182,7 @@ async def fetch_next_block(self): """ results = [] - for _ in xrange(self._page_size): + for _ in range(self._page_size): try: results.append(await self.__anext__()) except StopAsyncIteration: diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py index fbeaee482e90..ca272aa36426 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_asynchronous_request.py @@ -25,8 +25,7 @@ import json import time -from six.moves.urllib.parse import urlparse -from typing import AnyStr +from urllib.parse import urlparse from azure.core.exceptions import DecodeError # type: ignore from .. 
import exceptions @@ -167,7 +166,7 @@ async def AsynchronousRequest( :rtype: tuple of (dict dict) """ request.data = _request_body_from_data(request_data) - if request.data and isinstance(request.data, AnyStr): + if request.data and isinstance(request.data, str): request.headers[http_constants.HttpHeaders.ContentLength] = len(request.data) elif request.data is None: request.headers[http_constants.HttpHeaders.ContentLength] = 0 diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py index 98ed4db8a3f7..49f2f99c8a25 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py @@ -26,8 +26,8 @@ """ # https://github.com/PyCQA/pylint/issues/3112 # Currently pylint is locked to 2.3.3 and this is fixed in 2.4.4 -from typing import AnyStr, Dict, Any, Optional # pylint: disable=unused-import -from six.moves.urllib.parse import urlparse +from typing import Dict, Any, Optional # pylint: disable=unused-import +from urllib.parse import urlparse from urllib3.util.retry import Retry from azure.core.async_paging import AsyncItemPaged from azure.core import AsyncPipelineClient @@ -2267,15 +2267,15 @@ def __CheckAndUnifyQueryFormat(self, query_body): self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.Default or self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.Query ): - if not isinstance(query_body, dict) and not isinstance(query_body, AnyStr): + if not isinstance(query_body, dict) and not isinstance(query_body, str): raise TypeError("query body must be a dict or string.") if isinstance(query_body, dict) and not query_body.get("query"): raise ValueError('query body must have valid query text with key "query".') - if isinstance(query_body, AnyStr): + if isinstance(query_body, str): return {"query": query_body} elif ( self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.SqlQuery - and not isinstance(query_body, AnyStr) + and not isinstance(query_body, str) ): raise TypeError("query body must be a string.") else: diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py index b1b9132064fd..78e63b98e528 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py @@ -24,7 +24,7 @@ """ import asyncio -from six.moves.urllib.parse import urlparse +from urllib.parse import urlparse from .. import _constants as constants from .. import exceptions from .._location_cache import LocationCache @@ -38,7 +38,7 @@ class _GlobalEndpointManager(object): """ def __init__(self, client): - self.Client = client + self.client = client self.EnableEndpointDiscovery = client.connection_policy.EnableEndpointDiscovery self.PreferredLocations = client.connection_policy.PreferredLocations self.DefaultEndpoint = client.url_connection @@ -139,7 +139,7 @@ async def _GetDatabaseAccountStub(self, endpoint, **kwargs): This can be used for mocking purposes as well. 
""" - return await self.Client.GetDatabaseAccount(endpoint, **kwargs) + return await self.client.GetDatabaseAccount(endpoint, **kwargs) @staticmethod def GetLocationalEndpoint(default_endpoint, location_name): diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py index 9da05775e7cd..2c9ece13eba3 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py @@ -22,7 +22,7 @@ """Create, read, update and delete items in the Azure Cosmos DB SQL API service. """ -from typing import Any, AnyStr, Dict, List, Optional, Union, Iterable, cast +from typing import Any, Dict, List, Optional, Union, Iterable, cast from azure.core.tracing.decorator import distributed_trace # pylint: disable=unused-import from azure.core.tracing.decorator_async import distributed_trace_async # type: ignore @@ -94,13 +94,13 @@ def scripts(self): def _get_document_link(self, item_or_link): # type: (Union[Dict[str, Any], str]) -> str - if isinstance(item_or_link, AnyStr): + if isinstance(item_or_link, str): return u"{}/docs/{}".format(self.container_link, item_or_link) return item_or_link["_self"] def _get_conflict_link(self, conflict_or_link): # type: (Union[Dict[str, Any], str]) -> str - if isinstance(conflict_or_link, AnyStr): + if isinstance(conflict_or_link, str): return u"{}/conflicts/{}".format(self.container_link, conflict_or_link) return conflict_or_link["_self"] @@ -112,7 +112,6 @@ async def _set_partition_key(self, partition_key): @distributed_trace_async async def read( self, - populate_query_metrics=None, # type: Optional[bool] populate_partition_key_range_statistics=None, # type: Optional[bool] populate_quota_info=None, # type: Optional[bool] **kwargs # type: Any @@ -120,7 +119,6 @@ async def read( # type: (...) -> Dict[str, Any] """Read the container properties. - :param populate_query_metrics: Enable returning query metrics in response headers. :param populate_partition_key_range_statistics: Enable returning partition key range statistics in response headers. :param populate_quota_info: Enable returning collection storage quota information in response headers. @@ -134,8 +132,6 @@ async def read( """ request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) - if populate_query_metrics is not None: - request_options["populateQueryMetrics"] = populate_query_metrics if populate_partition_key_range_statistics is not None: request_options["populatePartitionKeyRangeStatistics"] = populate_partition_key_range_statistics if populate_quota_info is not None: @@ -155,7 +151,6 @@ async def read( async def create_item( self, body, # type: Dict[str, Any] - populate_query_metrics=None, # type: Optional[bool] pre_trigger_include=None, # type: Optional[str] post_trigger_include=None, # type: Optional[str] indexing_directive=None, # type: Optional[Any] @@ -168,7 +163,6 @@ async def create_item( :func:`ContainerProxy.upsert_item` method. :param body: A dict-like object representing the item to create. - :param populate_query_metrics: Enable returning query metrics in response headers. :param pre_trigger_include: trigger id to be used as pre operation trigger. :param post_trigger_include: trigger id to be used as post operation trigger. :param indexing_directive: Indicate whether the document should be omitted from indexing. 
@@ -187,8 +181,6 @@ async def create_item( response_hook = kwargs.pop('response_hook', None) request_options["disableAutomaticIdGeneration"] = not kwargs.pop('enable_automatic_id_generation', False) - if populate_query_metrics: - request_options["populateQueryMetrics"] = populate_query_metrics if pre_trigger_include is not None: request_options["preTriggerInclude"] = pre_trigger_include if post_trigger_include is not None: @@ -206,10 +198,8 @@ async def create_item( @distributed_trace_async async def read_item( self, - item, # type: Union[str, Dict[str, Any]] + item_id, # type: str partition_key, # type: Any - populate_query_metrics=None, # type: Optional[bool] - post_trigger_include=None, # type: Optional[str] **kwargs # type: Any ): # type: (...) -> Dict[str, str] @@ -217,8 +207,6 @@ async def read_item( :param item: The ID (name) or dict representing item to retrieve. :param partition_key: Partition key for the item to retrieve. - :param populate_query_metrics: Enable returning query metrics in response headers. - :param post_trigger_include: trigger id to be used as post operation trigger. :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. :keyword Callable response_hook: A callable invoked with the response metadata. @@ -228,7 +216,7 @@ async def read_item( .. admonition:: Example: - .. literalinclude:: ../samples/examples.py + .. literalinclude:: ../samples/examples_async.py :start-after: [START update_item] :end-before: [END update_item] :language: python @@ -236,15 +224,11 @@ async def read_item( :caption: Get an item from the database and update one of its properties: :name: update_item """ - doc_link = self._get_document_link(item) + doc_link = self._get_document_link(item_id) request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if partition_key is not None: request_options["partitionKey"] = await self._set_partition_key(partition_key) - if populate_query_metrics is not None: - request_options["populateQueryMetrics"] = populate_query_metrics - if post_trigger_include is not None: - request_options["postTriggerInclude"] = post_trigger_include result = await self.client_connection.ReadItem(document_link=doc_link, options=request_options, **kwargs) if response_hook: @@ -255,14 +239,12 @@ async def read_item( def read_all_items( self, max_item_count=None, # type: Optional[int] - populate_query_metrics=None, # type: Optional[bool] **kwargs # type: Any ): # type: (...) -> Iterable[Dict[str, Any]] """List all the items in the container. :param max_item_count: Max number of items to be returned in the enumeration operation. - :param populate_query_metrics: Enable returning query metrics in response headers. :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. :keyword Callable response_hook: A callable invoked with the response metadata. 
@@ -273,8 +255,6 @@ def read_all_items( response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - if populate_query_metrics is not None: - feed_options["populateQueryMetrics"] = populate_query_metrics if hasattr(response_hook, "clear"): response_hook.clear() @@ -292,7 +272,6 @@ async def query_items( query, # type: str parameters=None, # type: Optional[List[Dict[str, object]]] partition_key=None, # type: Optional[Any] - enable_cross_partition_query=None, # type: Optional[bool] max_item_count=None, # type: Optional[int] enable_scan_in_query=None, # type: Optional[bool] populate_query_metrics=None, # type: Optional[bool] @@ -310,10 +289,8 @@ async def query_items( :param parameters: Optional array of parameters to the query. Each parameter is a dict() with 'name' and 'value' keys. Ignored if no query is provided. - :param partition_key: Specifies the partition key value for the item. - :param enable_cross_partition_query: Allows sending of more than one request to - execute the query in the Azure Cosmos DB service. - More than one request is necessary if the query is not scoped to single partition key value. + :param partition_key: Specifies the partition key value for the item. If none is provided, + a cross-partition query will be executed :param max_item_count: Max number of items to be returned in the enumeration operation. :param enable_scan_in_query: Allow scan on the queries which couldn't be served as indexing was opted out on the requested paths. @@ -326,7 +303,7 @@ async def query_items( .. admonition:: Example: - .. literalinclude:: ../samples/examples.py + .. literalinclude:: ../samples/examples_async.py :start-after: [START query_items] :end-before: [END query_items] :language: python @@ -334,7 +311,7 @@ async def query_items( :caption: Get all products that have not been discontinued: :name: query_items - .. literalinclude:: ../samples/examples.py + .. literalinclude:: ../samples/examples_async.py :start-after: [START query_items_param] :end-before: [END query_items_param] :language: python @@ -344,16 +321,16 @@ async def query_items( """ feed_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) - if enable_cross_partition_query is not None: - feed_options["enableCrossPartitionQuery"] = enable_cross_partition_query if max_item_count is not None: feed_options["maxItemCount"] = max_item_count if populate_query_metrics is not None: feed_options["populateQueryMetrics"] = populate_query_metrics - if partition_key is not None: - feed_options["partitionKey"] = await self._set_partition_key(partition_key) if enable_scan_in_query is not None: feed_options["enableScanInQuery"] = enable_scan_in_query + if partition_key is not None: + feed_options["partitionKey"] = await self._set_partition_key(partition_key) + else: + feed_options["enableCrossPartitionQuery"] = True if hasattr(response_hook, "clear"): response_hook.clear() @@ -421,7 +398,6 @@ def query_items_change_feed( async def upsert_item( self, body, # type: Dict[str, Any] - populate_query_metrics=None, # type: Optional[bool] pre_trigger_include=None, # type: Optional[str] post_trigger_include=None, # type: Optional[str] **kwargs # type: Any @@ -433,7 +409,6 @@ async def upsert_item( does not already exist, it is inserted. :param body: A dict-like object representing the item to update or insert. - :param populate_query_metrics: Enable returning query metrics in response headers. 
:param pre_trigger_include: trigger id to be used as pre operation trigger. :param post_trigger_include: trigger id to be used as post operation trigger. :keyword str session_token: Token for use with Session consistency. @@ -449,8 +424,6 @@ async def upsert_item( request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) request_options["disableIdGeneration"] = True - if populate_query_metrics is not None: - request_options["populateQueryMetrics"] = populate_query_metrics if pre_trigger_include is not None: request_options["preTriggerInclude"] = pre_trigger_include if post_trigger_include is not None: @@ -471,7 +444,6 @@ async def replace_item( self, item, # type: Union[str, Dict[str, Any]] body, # type: Dict[str, Any] - populate_query_metrics=None, # type: Optional[bool] pre_trigger_include=None, # type: Optional[str] post_trigger_include=None, # type: Optional[str] **kwargs # type: Any @@ -483,7 +455,6 @@ async def replace_item( :param item: The ID (name) or dict representing item to be replaced. :param body: A dict-like object representing the item to replace. - :param populate_query_metrics: Enable returning query metrics in response headers. :param pre_trigger_include: trigger id to be used as pre operation trigger. :param post_trigger_include: trigger id to be used as post operation trigger. :keyword str session_token: Token for use with Session consistency. @@ -501,8 +472,6 @@ async def replace_item( request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) request_options["disableIdGeneration"] = True - if populate_query_metrics is not None: - request_options["populateQueryMetrics"] = populate_query_metrics if pre_trigger_include is not None: request_options["preTriggerInclude"] = pre_trigger_include if post_trigger_include is not None: @@ -518,9 +487,8 @@ async def replace_item( @distributed_trace_async async def delete_item( self, - item, # type: Union[Dict[str, Any], str] + item_id, # type: str partition_key, # type: Any - populate_query_metrics=None, # type: Optional[bool] pre_trigger_include=None, # type: Optional[str] post_trigger_include=None, # type: Optional[str] **kwargs # type: Any @@ -532,7 +500,6 @@ async def delete_item( :param item: The ID (name) or dict representing item to be deleted. :param partition_key: Specifies the partition key value for the item. - :param populate_query_metrics: Enable returning query metrics in response headers. :param pre_trigger_include: trigger id to be used as pre operation trigger. :param post_trigger_include: trigger id to be used as post operation trigger. :keyword str session_token: Token for use with Session consistency. 
@@ -549,22 +516,20 @@ async def delete_item( response_hook = kwargs.pop('response_hook', None) if partition_key is not None: request_options["partitionKey"] = await self._set_partition_key(partition_key) - if populate_query_metrics is not None: - request_options["populateQueryMetrics"] = populate_query_metrics if pre_trigger_include is not None: request_options["preTriggerInclude"] = pre_trigger_include if post_trigger_include is not None: request_options["postTriggerInclude"] = post_trigger_include - document_link = self._get_document_link(item) + document_link = self._get_document_link(item_id) result = await self.client_connection.DeleteItem(document_link=document_link, options=request_options, **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers, result) @distributed_trace_async - async def read_offer(self, **kwargs): + async def read_throughput(self, **kwargs): # type: (Any) -> Offer - """Read the Offer object for this container. + """Read the throughput offer for this container. If no Offer already exists for the container, an exception is raised. @@ -693,7 +658,12 @@ async def query_conflicts( return result @distributed_trace_async - async def get_conflict(self, conflict, partition_key, **kwargs): + async def read_conflict( + self, + conflict, + partition_key, + **kwargs + ): # type: (Union[str, Dict[str, Any]], Any, Any) -> Dict[str, str] """Get the conflict identified by `conflict`. @@ -717,13 +687,18 @@ async def get_conflict(self, conflict, partition_key, **kwargs): return result @distributed_trace_async - async def delete_conflict(self, conflict, partition_key, **kwargs): + async def delete_conflict( + self, + conflict_id, + partition_key, + **kwargs + ): # type: (Union[str, Dict[str, Any]], Any, Any) -> None """Delete a specified conflict from the container. If the conflict does not already exist in the container, an exception is raised. - :param conflict: The ID (name) or dict representing the conflict to be deleted. + :param conflict: The ID (name) representing the conflict to be deleted. :param partition_key: Partition key for the conflict to delete. :keyword Callable response_hook: A callable invoked with the response metadata. :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The conflict wasn't deleted successfully. @@ -736,7 +711,7 @@ async def delete_conflict(self, conflict, partition_key, **kwargs): request_options["partitionKey"] = await self._set_partition_key(partition_key) result = await self.client_connection.DeleteConflict( - conflict_link=self._get_conflict_link(conflict), options=request_options, **kwargs + conflict_link=self._get_conflict_link(conflict_id), options=request_options, **kwargs ) if response_hook: response_hook(self.client_connection.last_response_headers, result) \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py index 57f4a8c76219..58d31d64ad0c 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py @@ -22,7 +22,7 @@ """Create, read, and delete databases in the Azure Cosmos DB SQL API service. """ -from typing import Any, AnyStr, Dict, Optional, Union, cast, Iterable, List +from typing import Any, Dict, Optional, Union, cast, Iterable, List from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.tracing.decorator import distributed_trace @@ -99,7 +99,7 @@ class CosmosClient(object): .. 
admonition:: Example: - .. literalinclude:: ../samples/examples.py + .. literalinclude:: ../samples/examples_async.py :start-after: [START create_client] :end-before: [END create_client] :language: python @@ -163,7 +163,7 @@ def from_connection_string(cls, conn_str, credential=None, consistency_level="Se @staticmethod def _get_database_link(database_or_id): # type: (Union[DatabaseProxy, str, Dict[str, str]]) -> str - if isinstance(database_or_id, AnyStr): + if isinstance(database_or_id, str): return "dbs/{}".format(database_or_id) try: return cast("DatabaseProxy", database_or_id).database_link @@ -176,7 +176,6 @@ def _get_database_link(database_or_id): async def create_database( # pylint: disable=redefined-builtin self, id, # type: str - populate_query_metrics=None, # type: Optional[bool] offer_throughput=None, # type: Optional[int] **kwargs # type: Any ): @@ -185,7 +184,6 @@ async def create_database( # pylint: disable=redefined-builtin Create a new database with the given ID (name). :param id: ID (name) of the database to create. - :param bool populate_query_metrics: Enable returning query metrics in response headers. :param int offer_throughput: The provisioned throughput for this offer. :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. @@ -199,7 +197,7 @@ async def create_database( # pylint: disable=redefined-builtin .. admonition:: Example: - .. literalinclude:: ../samples/examples.py + .. literalinclude:: ../samples/examples_async.py :start-after: [START create_database] :end-before: [END create_database] :language: python @@ -210,8 +208,6 @@ async def create_database( # pylint: disable=redefined-builtin request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) - if populate_query_metrics is not None: - request_options["populateQueryMetrics"] = populate_query_metrics if offer_throughput is not None: request_options["offerThroughput"] = offer_throughput @@ -224,7 +220,6 @@ async def create_database( # pylint: disable=redefined-builtin async def create_database_if_not_exists( # pylint: disable=redefined-builtin self, id, # type: str - populate_query_metrics=None, # type: Optional[bool] offer_throughput=None, # type: Optional[int] **kwargs # type: Any ): @@ -239,7 +234,6 @@ async def create_database_if_not_exists( # pylint: disable=redefined-builtin offer throughput if they differ from what is passed in. :param id: ID (name) of the database to read or create. - :param bool populate_query_metrics: Enable returning query metrics in response headers. :param int offer_throughput: The provisioned throughput for this offer. :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. 
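As a hedged usage sketch of the updated database creation surface (endpoint, key, and database id are placeholders): `create_database_if_not_exists` returns the existing proxy when the id is already taken, otherwise it provisions a new database with the requested throughput.

```Python
import asyncio
from azure.cosmos.aio import CosmosClient

URL, KEY = "https://<account>.documents.azure.com:443/", "<account-key>"  # placeholder credentials

async def main():
    async with CosmosClient(URL, credential=KEY) as client:
        # Reuses the database if it exists, otherwise creates it with the given throughput.
        database = await client.create_database_if_not_exists(
            id="cosmos_demo_db", offer_throughput=400
        )
        print(database.id)

asyncio.run(main())
```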
@@ -253,51 +247,37 @@ async def create_database_if_not_exists( # pylint: disable=redefined-builtin """ try: database_proxy = self.get_database_client(id) - await database_proxy.read( - populate_query_metrics=populate_query_metrics, - **kwargs - ) + await database_proxy.read(**kwargs) return database_proxy except CosmosResourceNotFoundError: return await self.create_database( id, - populate_query_metrics=populate_query_metrics, offer_throughput=offer_throughput, **kwargs ) - def get_database_client(self, database): - # type: (Union[str, DatabaseProxy, Dict[str, Any]]) -> DatabaseProxy + def get_database_client(self, database_id): + # type: (str) -> DatabaseProxy """Retrieve an existing database with the ID (name) `id`. - :param database: The ID (name), dict representing the properties or - `DatabaseProxy` instance of the database to read. + :param database: The ID (name) representing the properties of the database to read. :type database: str or dict(str, str) or ~azure.cosmos.DatabaseProxy :returns: A `DatabaseProxy` instance representing the retrieved database. :rtype: ~azure.cosmos.DatabaseProxy """ - if isinstance(database, DatabaseProxy): - id_value = database.id - else: - try: - id_value = database["id"] - except TypeError: - id_value = database - return DatabaseProxy(self.client_connection, id_value) + return DatabaseProxy(self.client_connection, database_id) @distributed_trace def list_databases( self, max_item_count=None, # type: Optional[int] - populate_query_metrics=None, # type: Optional[bool] **kwargs # type: Any ): # type: (...) -> Iterable[Dict[str, Any]] """List the databases in a Cosmos DB SQL database account. :param int max_item_count: Max number of items to be returned in the enumeration operation. - :param bool populate_query_metrics: Enable returning query metrics in response headers. :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. :keyword Callable response_hook: A callable invoked with the response metadata. @@ -308,8 +288,6 @@ def list_databases( response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - if populate_query_metrics is not None: - feed_options["populateQueryMetrics"] = populate_query_metrics result = self.client_connection.ReadDatabases(options=feed_options, **kwargs) if response_hook: @@ -321,9 +299,7 @@ def query_databases( self, query=None, # type: Optional[str] parameters=None, # type: Optional[List[str]] - enable_cross_partition_query=None, # type: Optional[bool] max_item_count=None, # type: Optional[int] - populate_query_metrics=None, # type: Optional[bool] **kwargs # type: Any ): # type: (...) -> Iterable[Dict[str, Any]] @@ -331,10 +307,7 @@ def query_databases( :param str query: The Azure Cosmos DB SQL query to execute. :param list[str] parameters: Optional array of parameters to the query. Ignored if no query is provided. - :param bool enable_cross_partition_query: Allow scan on the queries which couldn't be - served as indexing was opted out on the requested paths. :param int max_item_count: Max number of items to be returned in the enumeration operation. - :param bool populate_query_metrics: Enable returning query metrics in response headers. :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. 
:keyword Callable response_hook: A callable invoked with the response metadata. @@ -343,12 +316,8 @@ def query_databases( """ feed_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) - if enable_cross_partition_query is not None: - feed_options["enableCrossPartitionQuery"] = enable_cross_partition_query if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - if populate_query_metrics is not None: - feed_options["populateQueryMetrics"] = populate_query_metrics result = self.client_connection.QueryDatabases( query=query if parameters is None else dict(query=query, parameters=parameters), @@ -362,16 +331,14 @@ def query_databases( async def delete_database( self, database, # type: Union[str, DatabaseProxy, Dict[str, Any]] - populate_query_metrics=None, # type: Optional[bool] **kwargs # type: Any ): # type: (...) -> None """Delete the database with the given ID (name). - :param database: The ID (name), dict representing the properties or :class:`DatabaseProxy` + :param database: The ID (name), dict representing the properties, or :class:`DatabaseProxy` instance of the database to delete. :type database: str or dict(str, str) or ~azure.cosmos.DatabaseProxy - :param bool populate_query_metrics: Enable returning query metrics in response headers. :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource @@ -383,16 +350,14 @@ async def delete_database( """ request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) - if populate_query_metrics is not None: - request_options["populateQueryMetrics"] = populate_query_metrics - + database_link = self._get_database_link(database) await self.client_connection.DeleteDatabase(database_link, options=request_options, **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers) @distributed_trace_async - async def get_database_account(self, **kwargs): + async def _get_database_account(self, **kwargs): # type: (Any) -> DatabaseAccount """Retrieve the database account information. @@ -404,4 +369,4 @@ async def get_database_account(self, **kwargs): result = await self.client_connection.GetDatabaseAccount(**kwargs) if response_hook: response_hook(self.client_connection.last_response_headers) - return result \ No newline at end of file + return result diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py index e0b0bfce4d70..a04e92080e80 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py @@ -22,7 +22,7 @@ """Interact with databases in the Azure Cosmos DB SQL API service. 
""" -from typing import Any, AnyStr, List, Dict, Union, cast, Iterable, Optional +from typing import Any, List, Dict, Union, cast, Iterable, Optional import warnings from azure.core.tracing.decorator_async import distributed_trace_async @@ -86,7 +86,7 @@ def __repr__(self): @staticmethod def _get_container_id(container_or_id): # type: (Union[str, ContainerProxy, Dict[str, Any]]) -> str - if isinstance(container_or_id, AnyStr): + if isinstance(container_or_id, str): return container_or_id try: return cast("ContainerProxy", container_or_id).id @@ -100,7 +100,7 @@ def _get_container_link(self, container_or_id): def _get_user_link(self, user_or_id): # type: (Union[UserProxy, str, Dict[str, Any]]) -> str - if isinstance(user_or_id, AnyStr): + if isinstance(user_or_id, str): return u"{}/users/{}".format(self.database_link, user_or_id) try: return cast("UserProxy", user_or_id).user_link @@ -115,11 +115,10 @@ async def _get_properties(self): return self._properties @distributed_trace_async - async def read(self, populate_query_metrics=None, **kwargs): + async def read(self, **kwargs): # type: (Optional[bool], Any) -> Dict[str, Any] """Read the database properties. - :param bool populate_query_metrics: Enable returning query metrics in response headers. :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. :keyword Callable response_hook: A callable invoked with the response metadata. @@ -132,8 +131,6 @@ async def read(self, populate_query_metrics=None, **kwargs): database_link = CosmosClient._get_database_link(self) request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) - if populate_query_metrics is not None: - request_options["populateQueryMetrics"] = populate_query_metrics self._properties = await self.client_connection.ReadDatabase( database_link, options=request_options, **kwargs @@ -151,7 +148,6 @@ async def create_container( partition_key, # type: Any indexing_policy=None, # type: Optional[Dict[str, Any]] default_ttl=None, # type: Optional[int] - populate_query_metrics=None, # type: Optional[bool] offer_throughput=None, # type: Optional[int] unique_key_policy=None, # type: Optional[Dict[str, Any]] conflict_resolution_policy=None, # type: Optional[Dict[str, Any]] @@ -166,7 +162,6 @@ async def create_container( :param partition_key: The partition key to use for the container. :param indexing_policy: The indexing policy to apply to the container. :param default_ttl: Default time to live (TTL) for items in the container. If unspecified, items do not expire. - :param populate_query_metrics: Enable returning query metrics in response headers. :param offer_throughput: The provisioned throughput for this offer. :param unique_key_policy: The unique key policy to apply to the container. :param conflict_resolution_policy: The conflict resolution policy to apply to the container. @@ -185,7 +180,7 @@ async def create_container( .. admonition:: Example: - .. literalinclude:: ../samples/examples.py + .. literalinclude:: ../samples/examples_async.py :start-after: [START create_container] :end-before: [END create_container] :language: python @@ -193,7 +188,7 @@ async def create_container( :caption: Create a container with default settings: :name: create_container - .. literalinclude:: ../samples/examples.py + .. 
literalinclude:: ../samples/examples_async.py :start-after: [START create_container_with_settings] :end-before: [END create_container_with_settings] :language: python @@ -224,8 +219,6 @@ async def create_container( request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) - if populate_query_metrics is not None: - request_options["populateQueryMetrics"] = populate_query_metrics if offer_throughput is not None: request_options["offerThroughput"] = offer_throughput @@ -245,7 +238,6 @@ async def create_container_if_not_exists( partition_key, # type: Any indexing_policy=None, # type: Optional[Dict[str, Any]] default_ttl=None, # type: Optional[int] - populate_query_metrics=None, # type: Optional[bool] offer_throughput=None, # type: Optional[int] unique_key_policy=None, # type: Optional[Dict[str, Any]] conflict_resolution_policy=None, # type: Optional[Dict[str, Any]] @@ -262,7 +254,6 @@ async def create_container_if_not_exists( :param partition_key: The partition key to use for the container. :param indexing_policy: The indexing policy to apply to the container. :param default_ttl: Default time to live (TTL) for items in the container. If unspecified, items do not expire. - :param populate_query_metrics: Enable returning query metrics in response headers. :param offer_throughput: The provisioned throughput for this offer. :param unique_key_policy: The unique key policy to apply to the container. :param conflict_resolution_policy: The conflict resolution policy to apply to the container. @@ -284,7 +275,6 @@ async def create_container_if_not_exists( try: container_proxy = self.get_container_client(id) await container_proxy.read( - populate_query_metrics=populate_query_metrics, **kwargs ) return container_proxy @@ -294,25 +284,23 @@ async def create_container_if_not_exists( partition_key=partition_key, indexing_policy=indexing_policy, default_ttl=default_ttl, - populate_query_metrics=populate_query_metrics, offer_throughput=offer_throughput, unique_key_policy=unique_key_policy, conflict_resolution_policy=conflict_resolution_policy, analytical_storage_ttl=analytical_storage_ttl ) - def get_container_client(self, container): - # type: (Union[str, ContainerProxy, Dict[str, Any]]) -> ContainerProxy + def get_container_client(self, container_id): + # type: (str) -> ContainerProxy """Get a `ContainerProxy` for a container with specified ID (name). - :param container: The ID (name) of the container, a :class:`ContainerProxy` instance, - or a dict representing the properties of the container to be retrieved. + :param container: The ID (name) of the container to be retrieved. :returns: A `ContainerProxy` instance representing the container. :rtype: ~azure.cosmos.ContainerProxy .. admonition:: Example: - .. literalinclude:: ../samples/examples.py + .. 
literalinclude:: ../samples/examples_async.py :start-after: [START get_container] :end-before: [END get_container] :language: python @@ -320,23 +308,19 @@ def get_container_client(self, container): :caption: Get an existing container, handling a failure if encountered: :name: get_container """ - if isinstance(container, ContainerProxy): - id_value = container.id - else: - try: - id_value = container["id"] - except TypeError: - id_value = container - return ContainerProxy(self.client_connection, self.database_link, id_value) + return ContainerProxy(self.client_connection, self.database_link, container_id) @distributed_trace - def list_containers(self, max_item_count=None, populate_query_metrics=None, **kwargs): + def list_containers( + self, + max_item_count=None, + **kwargs + ): # type: (Optional[int], Optional[bool], Any) -> Iterable[Dict[str, Any]] """List the containers in the database. :param max_item_count: Max number of items to be returned in the enumeration operation. - :param populate_query_metrics: Enable returning query metrics in response headers. :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. :keyword Callable response_hook: A callable invoked with the response metadata. @@ -345,7 +329,7 @@ def list_containers(self, max_item_count=None, populate_query_metrics=None, **kw .. admonition:: Example: - .. literalinclude:: ../samples/examples.py + .. literalinclude:: ../samples/examples_async.py :start-after: [START list_containers] :end-before: [END list_containers] :language: python @@ -357,8 +341,6 @@ def list_containers(self, max_item_count=None, populate_query_metrics=None, **kw response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - if populate_query_metrics is not None: - feed_options["populateQueryMetrics"] = populate_query_metrics result = self.client_connection.ReadContainers( database_link=self.database_link, options=feed_options, **kwargs @@ -373,7 +355,6 @@ def query_containers( query=None, # type: Optional[str] parameters=None, # type: Optional[List[str]] max_item_count=None, # type: Optional[int] - populate_query_metrics=None, # type: Optional[bool] **kwargs # type: Any ): # type: (...) -> Iterable[Dict[str, Any]] @@ -382,7 +363,6 @@ def query_containers( :param query: The Azure Cosmos DB SQL query to execute. :param parameters: Optional array of parameters to the query. Ignored if no query is provided. :param max_item_count: Max number of items to be returned in the enumeration operation. - :param populate_query_metrics: Enable returning query metrics in response headers. :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. :keyword Callable response_hook: A callable invoked with the response metadata. 
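A minimal sketch of enumerating containers with the async proxy, assuming the paged result is consumed with `async for` as in the README examples elsewhere in this patch series; the account values and database id are placeholders.

```Python
import asyncio
from azure.cosmos.aio import CosmosClient

URL, KEY = "https://<account>.documents.azure.com:443/", "<account-key>"  # placeholder credentials

async def main():
    async with CosmosClient(URL, credential=KEY) as client:
        database = client.get_database_client("cosmos_demo_db")  # hypothetical database id

        # list_containers yields the properties dict of each container in the database.
        async for properties in database.list_containers(max_item_count=10):
            print(properties["id"])

asyncio.run(main())
```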
@@ -393,8 +373,6 @@ def query_containers( response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - if populate_query_metrics is not None: - feed_options["populateQueryMetrics"] = populate_query_metrics result = self.client_connection.QueryContainers( database_link=self.database_link, @@ -414,7 +392,6 @@ async def replace_container( indexing_policy=None, # type: Optional[Dict[str, Any]] default_ttl=None, # type: Optional[int] conflict_resolution_policy=None, # type: Optional[Dict[str, Any]] - populate_query_metrics=None, # type: Optional[bool] **kwargs # type: Any ): # type: (...) -> ContainerProxy @@ -430,7 +407,6 @@ async def replace_container( :param default_ttl: Default time to live (TTL) for items in the container. If unspecified, items do not expire. :param conflict_resolution_policy: The conflict resolution policy to apply to the container. - :param populate_query_metrics: Enable returning query metrics in response headers. :keyword str session_token: Token for use with Session consistency. :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource has changed, and act according to the condition specified by the `match_condition` parameter. @@ -444,7 +420,7 @@ async def replace_container( .. admonition:: Example: - .. literalinclude:: ../samples/examples.py + .. literalinclude:: ../samples/examples_async.py :start-after: [START reset_container_properties] :end-before: [END reset_container_properties] :language: python @@ -454,8 +430,6 @@ async def replace_container( """ request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) - if populate_query_metrics is not None: - request_options["populateQueryMetrics"] = populate_query_metrics container_id = self._get_container_id(container) container_link = self._get_container_link(container_id) @@ -486,7 +460,6 @@ async def replace_container( async def delete_container( self, container, # type: Union[str, ContainerProxy, Dict[str, Any]] - populate_query_metrics=None, # type: Optional[bool] **kwargs # type: Any ): # type: (...) -> None @@ -495,7 +468,6 @@ async def delete_container( :param container: The ID (name) of the container to delete. You can either pass in the ID of the container to delete, a :class:`ContainerProxy` instance or a dict representing the properties of the container. - :param populate_query_metrics: Enable returning query metrics in response headers. :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource @@ -507,8 +479,6 @@ async def delete_container( """ request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) - if populate_query_metrics is not None: - request_options["populateQueryMetrics"] = populate_query_metrics collection_link = self._get_container_link(container) result = await self.client_connection.DeleteContainer(collection_link, options=request_options, **kwargs) @@ -532,7 +502,7 @@ async def create_user(self, body, **kwargs): .. admonition:: Example: - .. literalinclude:: ../samples/examples.py + .. 
literalinclude:: ../samples/examples_async.py :start-after: [START create_user] :end-before: [END create_user] :language: python @@ -707,9 +677,9 @@ async def delete_user(self, user, **kwargs): response_hook(self.client_connection.last_response_headers, result) @distributed_trace_async - async def read_offer(self, **kwargs): + async def read_throughput(self, **kwargs): # type: (Any) -> Offer - """Read the Offer object for this database. + """Read the throughput offer for this database. :keyword Callable response_hook: A callable invoked with the response metadata. :returns: Offer for the database. diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py index 6deec2eed3e6..63883c5384cb 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py @@ -22,7 +22,7 @@ """Create, read, update and delete and execute scripts in the Azure Cosmos DB SQL API service. """ -from typing import Any, AnyStr, List, Dict, Union, Iterable, Optional +from typing import Any, List, Dict, Union, Iterable, Optional from azure.cosmos.aio._cosmos_client_connection_async import CosmosClientConnection from .._base import build_options @@ -53,7 +53,7 @@ def __init__(self, client_connection, container_link, is_system_key): def _get_resource_link(self, script_or_id, typ): # type: (Union[Dict[str, Any], str], str) -> str - if isinstance(script_or_id, AnyStr): + if isinstance(script_or_id, str): return u"{}/{}/{}".format(self.container_link, typ, script_or_id) return script_or_id["_self"] @@ -73,7 +73,13 @@ def list_stored_procedures(self, max_item_count=None, **kwargs): collection_link=self.container_link, options=feed_options, **kwargs ) - def query_stored_procedures(self, query, parameters=None, max_item_count=None, **kwargs): + def query_stored_procedures( + self, + query, + parameters=None, + max_item_count=None, + **kwargs + ): # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] """Return all stored procedures matching the given `query`. @@ -94,7 +100,7 @@ def query_stored_procedures(self, query, parameters=None, max_item_count=None, * **kwargs ) - async def get_stored_procedure(self, sproc, **kwargs): + async def read_stored_procedure(self, sproc, **kwargs): # type: (Union[str, Dict[str, Any]], Any) -> Dict[str, Any] """Get the stored procedure identified by `id`. @@ -126,7 +132,12 @@ async def create_stored_procedure(self, body, **kwargs): collection_link=self.container_link, sproc=body, options=request_options, **kwargs ) - async def replace_stored_procedure(self, sproc, body, **kwargs): + async def replace_stored_procedure( + self, + sproc, + body, + **kwargs + ): # type: (Union[str, Dict[str, Any]], Dict[str, Any], Any) -> Dict[str, Any] """Replace a specified stored procedure in the container. @@ -167,7 +178,7 @@ async def delete_stored_procedure(self, sproc, **kwargs): async def execute_stored_procedure( self, - sproc, # type: Union[str, Dict[str, Any]] + sproc_id, # type: str partition_key=None, # type: Optional[str] params=None, # type: Optional[List[Any]] enable_script_logging=None, # type: Optional[bool] @@ -178,7 +189,7 @@ async def execute_stored_procedure( If the stored procedure does not already exist in the container, an exception is raised. - :param sproc: The ID (name) or dict representing stored procedure to be executed. + :param sproc: The ID (name) representing the stored procedure to be executed. 
:param partition_key: Specifies the partition key to indicate which partition the sproc should execute on. :param params: List of parameters to be passed to the stored procedure to be executed. :param bool enable_script_logging: Enables or disables script logging for the current request. @@ -199,7 +210,7 @@ async def execute_stored_procedure( request_options["enableScriptLogging"] = enable_script_logging return await self.client_connection.ExecuteStoredProcedure( - sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), + sproc_link=self._get_resource_link(sproc_id, ScriptType.StoredProcedure), params=params, options=request_options, **kwargs @@ -242,7 +253,7 @@ def query_triggers(self, query, parameters=None, max_item_count=None, **kwargs): **kwargs ) - async def get_trigger(self, trigger, **kwargs): + async def read_trigger(self, trigger, **kwargs): # type: (Union[str, Dict[str, Any]], Any) -> Dict[str, Any] """Get a trigger identified by `id`. @@ -350,7 +361,7 @@ def query_user_defined_functions(self, query, parameters=None, max_item_count=No **kwargs ) - async def get_user_defined_function(self, udf, **kwargs): + async def read_user_defined_function(self, udf, **kwargs): # type: (Union[str, Dict[str, Any]], Any) -> Dict[str, Any] """Get a user-defined functions identified by `id`. diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py index 6215473667ac..1933d8977fa3 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py @@ -24,7 +24,7 @@ """Create, read, update and delete users in the Azure Cosmos DB SQL API service. """ -from typing import Any, AnyStr, List, Dict, Union, cast, Iterable, Optional +from typing import Any, List, Dict, Union, cast, Iterable, Optional from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.tracing.decorator import distributed_trace @@ -54,7 +54,7 @@ def __repr__(self): def _get_permission_link(self, permission_or_id): # type: (Union[Permission, str, Dict[str, Any]]) -> str - if isinstance(permission_or_id, AnyStr): + if isinstance(permission_or_id, str): return u"{}/permissions/{}".format(self.user_link, permission_or_id) try: return cast("Permission", permission_or_id).permission_link @@ -146,12 +146,11 @@ def query_permissions( return result @distributed_trace_async - async def get_permission(self, permission, **kwargs): + async def read_permission(self, permission_id, **kwargs): # type: (str, Any) -> Permission """Get the permission identified by `id`. - :param permission: The ID (name), dict representing the properties or :class:`Permission` - instance of the permission to be retrieved. + :param permission: The ID (name) of the permission to be retrieved. :keyword Callable response_hook: A callable invoked with the response metadata. :returns: A dict representing the retrieved permission. :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given permission couldn't be retrieved. 
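A short sketch of the renamed permission read path (mirroring the resource-token sample later in this patch series); the database, user, and permission ids are placeholders.

```Python
import asyncio
from azure.cosmos.aio import CosmosClient

URL, KEY = "https://<account>.documents.azure.com:443/", "<account-key>"  # placeholder credentials

async def main():
    async with CosmosClient(URL, credential=KEY) as client:
        database = client.get_database_client("cosmos_demo_db")   # hypothetical database id
        user = database.get_user_client("demo_user")              # hypothetical user id

        # read_permission replaces the old get_permission name.
        permission = await user.read_permission("read_only_permission")
        print(permission)

asyncio.run(main())
```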
@@ -161,7 +160,7 @@ async def get_permission(self, permission, **kwargs): response_hook = kwargs.pop('response_hook', None) permission_resp = await self.client_connection.ReadPermission( - permission_link=self._get_permission_link(permission), options=request_options, **kwargs + permission_link=self._get_permission_link(permission_id), options=request_options, **kwargs ) # type: Dict[str, str] if response_hook: @@ -273,14 +272,13 @@ async def replace_permission(self, permission, body, **kwargs): ) @distributed_trace_async - async def delete_permission(self, permission, **kwargs): + async def delete_permission(self, permission_id, **kwargs): # type: (str, Any) -> None """Delete the specified permission from the user. If the permission does not already exist, an exception is raised. - :param permission: The ID (name), dict representing the properties or :class:`Permission` - instance of the permission to be replaced. + :param permission: The ID (name) of the permission to be replaced. :keyword Callable response_hook: A callable invoked with the response metadata. :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The permission wasn't deleted successfully. :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The permission does not exist for the user. @@ -290,7 +288,7 @@ async def delete_permission(self, permission, **kwargs): response_hook = kwargs.pop('response_hook', None) result = await self.client_connection.DeletePermission( - permission_link=self._get_permission_link(permission), options=request_options, **kwargs + permission_link=self._get_permission_link(permission_id), options=request_options, **kwargs ) if response_hook: diff --git a/sdk/cosmos/azure-cosmos/samples/nonpartitioned_container_operations_async.py b/sdk/cosmos/azure-cosmos/samples/nonpartitioned_container_operations_async.py index 973c234268d2..ca9ee7d2abfb 100644 --- a/sdk/cosmos/azure-cosmos/samples/nonpartitioned_container_operations_async.py +++ b/sdk/cosmos/azure-cosmos/samples/nonpartitioned_container_operations_async.py @@ -1,9 +1,8 @@ import azure.cosmos.aio.cosmos_client as cosmos_client import azure.cosmos.exceptions as exceptions import requests -import six import json -from six.moves.urllib.parse import quote as urllib_quote +from urllib.parse import quote as urllib_quote import azure.cosmos.auth as auth import azure.cosmos.partition_key as partition_key import datetime @@ -53,9 +52,7 @@ def create_nonpartitioned_container(db): verify=False) data = response.content - if not six.PY2: - # python 3 compatible: convert data from byte to unicode string - data = data.decode('utf-8') + data = data.decode('utf-8') data = json.loads(data) created_container = db.get_container_client("mycoll") @@ -77,9 +74,7 @@ def create_nonpartitioned_container(db): verify=False) data = response.content - if not six.PY2: - # python 3 compatible: convert data from byte to unicode string - data = data.decode('utf-8') + data = data.decode('utf-8') data = json.loads(data) return created_container, "SalesOrder0" From ad98039b331c79c5ca6d53297dc47154d6fea73c Mon Sep 17 00:00:00 2001 From: simorenoh Date: Thu, 4 Nov 2021 14:08:59 -0400 Subject: [PATCH 33/56] small change in type hints --- sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py index 2c9ece13eba3..aea3133f4c21 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py 
+++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py @@ -156,7 +156,7 @@ async def create_item( indexing_directive=None, # type: Optional[Any] **kwargs # type: Any ): - # type: (...) -> Dict[str, str] + # type: (...) -> Dict[str, Any] """Create an item in the container. To update or replace an existing item, use the @@ -202,7 +202,7 @@ async def read_item( partition_key, # type: Any **kwargs # type: Any ): - # type: (...) -> Dict[str, str] + # type: (...) -> Dict[str, Any] """Get the item identified by `item`. :param item: The ID (name) or dict representing item to retrieve. @@ -402,7 +402,7 @@ async def upsert_item( post_trigger_include=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> Dict[str, str] + # type: (...) -> Dict[str, Any] """Insert or update the specified item. If the item already exists in the container, it is replaced. If the item @@ -448,7 +448,7 @@ async def replace_item( post_trigger_include=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> Dict[str, str] + # type: (...) -> Dict[str, Any] """Replaces the specified item if it exists in the container. If the item does not already exist in the container, an exception is raised. @@ -664,7 +664,7 @@ async def read_conflict( partition_key, **kwargs ): - # type: (Union[str, Dict[str, Any]], Any, Any) -> Dict[str, str] + # type: (Union[str, Dict[str, Any]], Any, Any) -> Dict[str, Any] """Get the conflict identified by `conflict`. :param conflict: The ID (name) or dict representing the conflict to retrieve. From f76c59593ba0eddf486e85b650deee05dc758663 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Tue, 9 Nov 2021 13:05:07 -0500 Subject: [PATCH 34/56] updated readme --- sdk/cosmos/azure-cosmos/README.md | 5 ++--- sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py | 11 ++++------- 2 files changed, 6 insertions(+), 10 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/README.md b/sdk/cosmos/azure-cosmos/README.md index 45b0e3c2fd66..ab21e13c9aac 100644 --- a/sdk/cosmos/azure-cosmos/README.md +++ b/sdk/cosmos/azure-cosmos/README.md @@ -484,7 +484,7 @@ async with CosmosClient(url, credential=key) as client: ### Queries with the asynchronous client -Queries work the same way for the most part, and results can be directly iterated on, but because queries made by the asynchronous client return AsyncIterable objects, results can't be cast into lists directly; instead, if you need to create lists from your results, use Python's list comprehension to populate a list: +Queries work the same way for the most part, with one exception being the absence of the `enable_cross_partition_query` flag in the request; queries without a specified partition key value will now attempt a cross partition query by default. 
Results can be directly iterated on, but because queries made by the asynchronous client return AsyncIterable objects, results can't be cast into lists directly; instead, if you need to create lists from your results, use Python's list comprehension to populate a list: ```Python from azure.cosmos.aio import CosmosClient @@ -500,8 +500,7 @@ container = database.get_container_client(container_name) async def create_lists(): results = await container.query_items( - query='SELECT * FROM products p WHERE p.productModel = "Model 2"', - enable_cross_partition_query=True) + query='SELECT * FROM products p WHERE p.productModel = "Model 2"') # Iterating directly on results async for item in results: diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py index aea3133f4c21..203393288a7e 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py @@ -619,7 +619,6 @@ async def query_conflicts( self, query, # type: str parameters=None, # type: Optional[List[str]] - enable_cross_partition_query=None, # type: Optional[bool] partition_key=None, # type: Optional[Any] max_item_count=None, # type: Optional[int] **kwargs # type: Any @@ -629,10 +628,8 @@ async def query_conflicts( :param query: The Azure Cosmos DB SQL query to execute. :param parameters: Optional array of parameters to the query. Ignored if no query is provided. - :param enable_cross_partition_query: Allows sending of more than one request to execute - the query in the Azure Cosmos DB service. - More than one request is necessary if the query is not scoped to single partition key value. - :param partition_key: Specifies the partition key value for the item. + :param partition_key: Specifies the partition key value for the item. If none is passed in, a + cross partition query will be executed. :param max_item_count: Max number of items to be returned in the enumeration operation. :keyword Callable response_hook: A callable invoked with the response metadata. :returns: An Iterable of conflicts (dicts). 
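To illustrate the README guidance above on consuming async query results, a minimal sketch using an async comprehension in place of `list()`; the account values, database, container, and query are placeholders.

```Python
import asyncio
from azure.cosmos.aio import CosmosClient

URL, KEY = "https://<account>.documents.azure.com:443/", "<account-key>"  # placeholder credentials

async def main():
    async with CosmosClient(URL, credential=KEY) as client:
        container = client.get_database_client("cosmos_demo_db").get_container_client("products")

        # No partition_key and no enable_cross_partition_query flag:
        # the query is executed across partitions by default.
        results = await container.query_items(
            query='SELECT * FROM products p WHERE p.productModel = "Model 2"'
        )

        # The result is an AsyncIterable, so list(results) does not work;
        # an async comprehension gathers the items instead.
        items = [item async for item in results]
        print(len(items))

asyncio.run(main())
```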
@@ -642,10 +639,10 @@ async def query_conflicts( response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - if enable_cross_partition_query is not None: - feed_options["enableCrossPartitionQuery"] = enable_cross_partition_query if partition_key is not None: feed_options["partitionKey"] = await self._set_partition_key(partition_key) + else: + feed_options["enableCrossPartitionQuery"] = True result = self.client_connection.QueryConflicts( collection_link=self.container_link, From 3f02a65e42e153608675cdc77f7e4c8a1be3e7d1 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Wed, 10 Nov 2021 16:31:50 -0500 Subject: [PATCH 35/56] fixes based on conversations --- .../azure/cosmos/aio/container.py | 18 ++++----- .../azure/cosmos/aio/cosmos_client.py | 6 ++- .../azure-cosmos/azure/cosmos/aio/database.py | 38 ++++++++++--------- .../azure-cosmos/azure/cosmos/aio/scripts.py | 6 +-- .../azure-cosmos/azure/cosmos/aio/user.py | 20 +++++----- ...access_cosmos_with_resource_token_async.py | 6 ++- .../samples/container_management_async.py | 6 +-- .../azure-cosmos/samples/examples_async.py | 2 +- .../samples/index_management_async.py | 1 + 9 files changed, 56 insertions(+), 47 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py index 203393288a7e..42e3f6791de7 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py @@ -198,7 +198,7 @@ async def create_item( @distributed_trace_async async def read_item( self, - item_id, # type: str + item, # type: Union[str, Dict[str, Any]] partition_key, # type: Any **kwargs # type: Any ): @@ -224,7 +224,7 @@ async def read_item( :caption: Get an item from the database and update one of its properties: :name: update_item """ - doc_link = self._get_document_link(item_id) + doc_link = self._get_document_link(item) request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if partition_key is not None: @@ -270,7 +270,7 @@ def read_all_items( async def query_items( self, query, # type: str - parameters=None, # type: Optional[List[Dict[str, object]]] + parameters=None, # type: Optional[List[Dict[str, Any]]] partition_key=None, # type: Optional[Any] max_item_count=None, # type: Optional[int] enable_scan_in_query=None, # type: Optional[bool] @@ -487,7 +487,7 @@ async def replace_item( @distributed_trace_async async def delete_item( self, - item_id, # type: str + item, # type: Union[str, Dict[str, Any]] partition_key, # type: Any pre_trigger_include=None, # type: Optional[str] post_trigger_include=None, # type: Optional[str] @@ -521,7 +521,7 @@ async def delete_item( if post_trigger_include is not None: request_options["postTriggerInclude"] = post_trigger_include - document_link = self._get_document_link(item_id) + document_link = self._get_document_link(item) result = await self.client_connection.DeleteItem(document_link=document_link, options=request_options, **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers, result) @@ -618,7 +618,7 @@ def list_conflicts(self, max_item_count=None, **kwargs): async def query_conflicts( self, query, # type: str - parameters=None, # type: Optional[List[str]] + parameters=None, # type: Optional[List[Dict[str, Any]]] partition_key=None, # type: Optional[Any] max_item_count=None, # type: Optional[int] **kwargs # type: Any @@ -686,7 +686,7 @@ async def read_conflict( 
@distributed_trace_async async def delete_conflict( self, - conflict_id, + conflict, partition_key, **kwargs ): @@ -695,7 +695,7 @@ async def delete_conflict( If the conflict does not already exist in the container, an exception is raised. - :param conflict: The ID (name) representing the conflict to be deleted. + :param conflict: The ID (name) or dict representing the conflict to be deleted. :param partition_key: Partition key for the conflict to delete. :keyword Callable response_hook: A callable invoked with the response metadata. :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The conflict wasn't deleted successfully. @@ -708,7 +708,7 @@ async def delete_conflict( request_options["partitionKey"] = await self._set_partition_key(partition_key) result = await self.client_connection.DeleteConflict( - conflict_link=self._get_conflict_link(conflict_id), options=request_options, **kwargs + conflict_link=self._get_conflict_link(conflict), options=request_options, **kwargs ) if response_hook: response_hook(self.client_connection.last_response_headers, result) \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py index 58d31d64ad0c..4fc56aa7745f 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py @@ -298,7 +298,7 @@ def list_databases( def query_databases( self, query=None, # type: Optional[str] - parameters=None, # type: Optional[List[str]] + parameters=None, # type: Optional[List[Dict[str, Any]]] max_item_count=None, # type: Optional[int] **kwargs # type: Any ): @@ -306,7 +306,9 @@ def query_databases( """Query the databases in a Cosmos DB SQL database account. :param str query: The Azure Cosmos DB SQL query to execute. - :param list[str] parameters: Optional array of parameters to the query. Ignored if no query is provided. + :param list[dict[str, any]] parameters: Optional array of parameters to the query. + Each parameter is a dict() with 'name' and 'value' keys. + Ignored if no query is provided. :param int max_item_count: Max number of items to be returned in the enumeration operation. :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py index a04e92080e80..147e227a3ecb 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py @@ -316,7 +316,7 @@ def list_containers( self, max_item_count=None, **kwargs - ): + ): # type: (Optional[int], Optional[bool], Any) -> Iterable[Dict[str, Any]] """List the containers in the database. @@ -353,7 +353,7 @@ def list_containers( def query_containers( self, query=None, # type: Optional[str] - parameters=None, # type: Optional[List[str]] + parameters=None, # type: Optional[List[Dict[str, Any]]] max_item_count=None, # type: Optional[int] **kwargs # type: Any ): @@ -361,7 +361,9 @@ def query_containers( """List the properties for containers in the current database. :param query: The Azure Cosmos DB SQL query to execute. - :param parameters: Optional array of parameters to the query. Ignored if no query is provided. + :param parameters: Optional array of parameters to the query. + Each parameter is a dict() with 'name' and 'value' keys. + Ignored if no query is provided. 
:param max_item_count: Max number of items to be returned in the enumeration operation. :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. @@ -523,24 +525,16 @@ async def create_user(self, body, **kwargs): client_connection=self.client_connection, id=user["id"], database_link=self.database_link, properties=user ) - def get_user_client(self, user): - # type: (Union[str, UserProxy, Dict[str, Any]]) -> UserProxy + def get_user_client(self, user_id): + # type: (str) -> UserProxy """Get a `UserProxy` for a user with specified ID. - :param user: The ID (name), dict representing the properties or :class:`UserProxy` - instance of the user to be retrieved. + :param user: The ID (name) of the user to be retrieved. :returns: A `UserProxy` instance representing the retrieved user. :rtype: ~azure.cosmos.UserProxy """ - if isinstance(user, UserProxy): - id_value = user.id - else: - try: - id_value = user["id"] - except TypeError: - id_value = user - return UserProxy(client_connection=self.client_connection, id=id_value, database_link=self.database_link) + return UserProxy(client_connection=self.client_connection, id=user_id, database_link=self.database_link) @distributed_trace def list_users(self, max_item_count=None, **kwargs): @@ -565,12 +559,20 @@ def list_users(self, max_item_count=None, **kwargs): return result @distributed_trace - def query_users(self, query, parameters=None, max_item_count=None, **kwargs): - # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] + def query_users( + self, + query=None, # type: Optional[str] + parameters=None, # type: Optional[List[Dict[str, Any]]] + max_item_count=None, # type: Optional[int] + **kwargs # type: Any + ): + # type: (...) -> Iterable[Dict[str, Any]] """Return all users matching the given `query`. :param query: The Azure Cosmos DB SQL query to execute. - :param parameters: Optional array of parameters to the query. Ignored if no query is provided. + :param parameters: Optional array of parameters to the query. + Each parameter is a dict() with 'name' and 'value' keys. + Ignored if no query is provided. :param max_item_count: Max number of users to be returned in the enumeration operation. :keyword Callable response_hook: A callable invoked with the response metadata. :returns: An Iterable of user properties (dicts). diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py index 63883c5384cb..5263c6d1de3e 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py @@ -178,7 +178,7 @@ async def delete_stored_procedure(self, sproc, **kwargs): async def execute_stored_procedure( self, - sproc_id, # type: str + sproc, # type: Union[str, Dict[str, Any]] partition_key=None, # type: Optional[str] params=None, # type: Optional[List[Any]] enable_script_logging=None, # type: Optional[bool] @@ -189,7 +189,7 @@ async def execute_stored_procedure( If the stored procedure does not already exist in the container, an exception is raised. - :param sproc: The ID (name) representing the stored procedure to be executed. + :param sproc: The ID (name) or dict representing the stored procedure to be executed. :param partition_key: Specifies the partition key to indicate which partition the sproc should execute on. :param params: List of parameters to be passed to the stored procedure to be executed. 
:param bool enable_script_logging: Enables or disables script logging for the current request. @@ -210,7 +210,7 @@ async def execute_stored_procedure( request_options["enableScriptLogging"] = enable_script_logging return await self.client_connection.ExecuteStoredProcedure( - sproc_link=self._get_resource_link(sproc_id, ScriptType.StoredProcedure), + sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), params=params, options=request_options, **kwargs diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py index 1933d8977fa3..0af69bc6ac5d 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py @@ -146,11 +146,12 @@ def query_permissions( return result @distributed_trace_async - async def read_permission(self, permission_id, **kwargs): - # type: (str, Any) -> Permission + async def read_permission(self, permission, **kwargs): + # type: (Union[str, Dict[str, Any], Permission], Any) -> Permission """Get the permission identified by `id`. - :param permission: The ID (name) of the permission to be retrieved. + :param permission: The ID (name), dict representing the properties or :class:`Permission` + instance of the permission to be retrieved. :keyword Callable response_hook: A callable invoked with the response metadata. :returns: A dict representing the retrieved permission. :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given permission couldn't be retrieved. @@ -160,7 +161,7 @@ async def read_permission(self, permission_id, **kwargs): response_hook = kwargs.pop('response_hook', None) permission_resp = await self.client_connection.ReadPermission( - permission_link=self._get_permission_link(permission_id), options=request_options, **kwargs + permission_link=self._get_permission_link(permission), options=request_options, **kwargs ) # type: Dict[str, str] if response_hook: @@ -239,7 +240,7 @@ async def upsert_permission(self, body, **kwargs): @distributed_trace_async async def replace_permission(self, permission, body, **kwargs): - # type: (str, Dict[str, Any], Any) -> Permission + # type: (str, Union[str, Dict[str, Any], Permission], Any) -> Permission """Replaces the specified permission if it exists for the user. If the permission does not already exist, an exception is raised. @@ -272,13 +273,14 @@ async def replace_permission(self, permission, body, **kwargs): ) @distributed_trace_async - async def delete_permission(self, permission_id, **kwargs): - # type: (str, Any) -> None + async def delete_permission(self, permission, **kwargs): + # type: (Union[str, Dict[str, Any], Permission], Any) -> None """Delete the specified permission from the user. If the permission does not already exist, an exception is raised. - :param permission: The ID (name) of the permission to be replaced. + :param permission: The ID (name), dict representing the properties or :class:`Permission` + instance of the permission to be deleted. :keyword Callable response_hook: A callable invoked with the response metadata. :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The permission wasn't deleted successfully. :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The permission does not exist for the user. 
@@ -288,7 +290,7 @@ async def delete_permission(self, permission_id, **kwargs): response_hook = kwargs.pop('response_hook', None) result = await self.client_connection.DeletePermission( - permission_link=self._get_permission_link(permission_id), options=request_options, **kwargs + permission_link=self._get_permission_link(permission), options=request_options, **kwargs ) if response_hook: diff --git a/sdk/cosmos/azure-cosmos/samples/access_cosmos_with_resource_token_async.py b/sdk/cosmos/azure-cosmos/samples/access_cosmos_with_resource_token_async.py index c77107f02ec4..1571f347233c 100644 --- a/sdk/cosmos/azure-cosmos/samples/access_cosmos_with_resource_token_async.py +++ b/sdk/cosmos/azure-cosmos/samples/access_cosmos_with_resource_token_async.py @@ -24,6 +24,7 @@ # Each time a Container is created the account will be billed for 1 hour of usage based on # the provisioned throughput (RU/s) of that account. # ---------------------------------------------------------------------------------------------------------- + HOST = config.settings["host"] MASTER_KEY = config.settings["master_key"] DATABASE_ID = config.settings["database_id"] @@ -51,7 +52,7 @@ async def create_permission_if_not_exists(user, permission_definition): try: permission = await user.create_permission(permission_definition) except exceptions.CosmosResourceExistsError: - permission = await user.get_permission(permission_definition["id"]) + permission = await user.read_permission(permission_definition["id"]) return permission @@ -233,7 +234,8 @@ async def run_sample(): await token_client_read_item(token_container, USERNAME_2, ITEM_3_ID) await token_client_delete(token_container, USERNAME_2, ITEM_3_ID) - # Closing current token client + # Cleaning up and closing current token client + await token_client.delete_database(DATABASE_ID) await token_client.close() except exceptions.CosmosHttpResponseError as e: diff --git a/sdk/cosmos/azure-cosmos/samples/container_management_async.py b/sdk/cosmos/azure-cosmos/samples/container_management_async.py index 63d2ae695283..26744f9f592c 100644 --- a/sdk/cosmos/azure-cosmos/samples/container_management_async.py +++ b/sdk/cosmos/azure-cosmos/samples/container_management_async.py @@ -207,10 +207,10 @@ async def manage_provisioned_throughput(db, id): try: # read the container, so we can get its _self - container = db.get_container_client(container=id) + container = db.get_container_client(id) - # now use its _self to query for Offers - offer = await container.read_offer() + # now use its _self to query for throughput offers + offer = await container.read_throughput() print('Found Offer \'{0}\' for Container \'{1}\' and its throughput is \'{2}\''.format(offer.properties['id'], container.id, offer.properties['content']['offerThroughput'])) diff --git a/sdk/cosmos/azure-cosmos/samples/examples_async.py b/sdk/cosmos/azure-cosmos/samples/examples_async.py index 1bd9a4474be8..90c05378faa2 100644 --- a/sdk/cosmos/azure-cosmos/samples/examples_async.py +++ b/sdk/cosmos/azure-cosmos/samples/examples_async.py @@ -23,7 +23,7 @@ async def examples_async(): try: database = await client.create_database(id=database_name) except exceptions.CosmosResourceExistsError: - database = client.get_database_client(database=database_name) + database = client.get_database_client(database_id=database_name) # [END create_database] # Create a container, handling the exception if a container with the diff --git a/sdk/cosmos/azure-cosmos/samples/index_management_async.py 
b/sdk/cosmos/azure-cosmos/samples/index_management_async.py index 59ee642634a8..23650f307649 100644 --- a/sdk/cosmos/azure-cosmos/samples/index_management_async.py +++ b/sdk/cosmos/azure-cosmos/samples/index_management_async.py @@ -654,6 +654,7 @@ async def run_sample(): # 8. Perform Multi Orderby queries using composite indexes await perform_multi_orderby_query(created_db) + print('Sample done, cleaning up sample-generated data') await client.delete_database(DATABASE_ID) await client.close() From e71986914ff7b92dcdef40368ff32f59400e3871 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Wed, 10 Nov 2021 19:14:14 -0500 Subject: [PATCH 36/56] added missing type comments --- .../azure-cosmos/azure/cosmos/aio/container.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py index 42e3f6791de7..bbb77e12e551 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py @@ -657,9 +657,9 @@ async def query_conflicts( @distributed_trace_async async def read_conflict( self, - conflict, - partition_key, - **kwargs + conflict, # type: Union[str, Dict[str, Any]] + partition_key, # type: Any + **kwargs # type: Any ): # type: (Union[str, Dict[str, Any]], Any, Any) -> Dict[str, Any] """Get the conflict identified by `conflict`. @@ -686,9 +686,9 @@ async def read_conflict( @distributed_trace_async async def delete_conflict( self, - conflict, - partition_key, - **kwargs + conflict, # type: Union[str, Dict[str, Any]] + partition_key, # type: Any + **kwargs # type: Any ): # type: (Union[str, Dict[str, Any]], Any, Any) -> None """Delete a specified conflict from the container. From 02c52ee3476e6e8af75222215c893137bb04e3e1 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Mon, 22 Nov 2021 20:25:30 -0500 Subject: [PATCH 37/56] update changelog for ci pipeline --- sdk/cosmos/azure-cosmos/CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/sdk/cosmos/azure-cosmos/CHANGELOG.md b/sdk/cosmos/azure-cosmos/CHANGELOG.md index ff4af76d7f08..14df62dece1c 100644 --- a/sdk/cosmos/azure-cosmos/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos/CHANGELOG.md @@ -2,6 +2,8 @@ **New features** - Added language native async i/o client +## 4.2.1 (Unreleased) + ## 4.2.0 (2020-10-08) **Bug fixes** From 2cb45510704cb4a30fa7bbee858b74e48f8eed3d Mon Sep 17 00:00:00 2001 From: simorenoh Date: Mon, 29 Nov 2021 17:01:20 -0500 Subject: [PATCH 38/56] added typehints, moved params into keywords, added decorators, made _connection_policy private --- .../azure-cosmos/azure/cosmos/aio/__init__.py | 4 +-- .../aio/_cosmos_client_connection_async.py | 16 ++++++------ .../azure/cosmos/aio/_retry_utility_async.py | 4 +-- .../azure/cosmos/aio/container.py | 21 ++++++++------- .../azure/cosmos/aio/cosmos_client.py | 19 +++++++------- .../azure-cosmos/azure/cosmos/aio/database.py | 11 ++++---- .../azure-cosmos/azure/cosmos/aio/scripts.py | 26 +++++++++++++++++-- 7 files changed, 62 insertions(+), 39 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/__init__.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/__init__.py index 1c43976e030c..f1b5e4ef3f2a 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/__init__.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/__init__.py @@ -19,7 +19,6 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
-from ._retry_utility_async import ConnectionRetryPolicy from .container import ContainerProxy from .cosmos_client import CosmosClient from .database import DatabaseProxy @@ -31,6 +30,5 @@ "DatabaseProxy", "ContainerProxy", "ScriptsProxy", - "UserProxy", - "ConnectionRetryPolicy" + "UserProxy" ) \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py index 49f2f99c8a25..820cdfc9d203 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py @@ -54,7 +54,7 @@ from . import _asynchronous_request as asynchronous_request from . import _global_endpoint_manager_async as global_endpoint_manager_async from .._routing.aio import routing_map_provider -from ._retry_utility_async import ConnectionRetryPolicy +from ._retry_utility_async import _ConnectionRetryPolicy from .. import _session from .. import _utils from ..partition_key import _Undefined, _Empty @@ -158,10 +158,10 @@ def __init__( if isinstance(self.connection_policy.ConnectionRetryConfiguration, AsyncHTTPPolicy): retry_policy = self.connection_policy.ConnectionRetryConfiguration elif isinstance(self.connection_policy.ConnectionRetryConfiguration, int): - retry_policy = ConnectionRetryPolicy(total=self.connection_policy.ConnectionRetryConfiguration) + retry_policy = _ConnectionRetryPolicy(total=self.connection_policy.ConnectionRetryConfiguration) elif isinstance(self.connection_policy.ConnectionRetryConfiguration, Retry): # Convert a urllib3 retry policy to a Pipeline policy - retry_policy = ConnectionRetryPolicy( + retry_policy = _ConnectionRetryPolicy( retry_total=self.connection_policy.ConnectionRetryConfiguration.total, retry_connect=self.connection_policy.ConnectionRetryConfiguration.connect, retry_read=self.connection_policy.ConnectionRetryConfiguration.read, @@ -205,12 +205,12 @@ def __init__( self._routing_map_provider = routing_map_provider.SmartRoutingMapProvider(self) @property - def Session(self): + def _Session(self): """Gets the session object from the client. """ return self.session - @Session.setter - def Session(self, session): + @_Session.setter + def _Session(self, session): """Sets a session object on the document client. This will override the existing session @@ -218,13 +218,13 @@ def Session(self, session): self.session = session @property - def WriteEndpoint(self): + def _WriteEndpoint(self): """Gets the curent write endpoint for a geo-replicated database account. """ return self._global_endpoint_manager.get_write_endpoint() @property - def ReadEndpoint(self): + def _ReadEndpoint(self): """Gets the curent read endpoint for a geo-replicated database account. 
""" return self._global_endpoint_manager.get_read_endpoint() diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility_async.py index 156728a5f56a..c2fa3b5300ab 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_retry_utility_async.py @@ -133,11 +133,11 @@ async def ExecuteFunctionAsync(function, *args, **kwargs): return await function(*args, **kwargs) -class ConnectionRetryPolicy(AsyncRetryPolicy): +class _ConnectionRetryPolicy(AsyncRetryPolicy): def __init__(self, **kwargs): clean_kwargs = {k: v for k, v in kwargs.items() if v is not None} - super(ConnectionRetryPolicy, self).__init__(**clean_kwargs) + super(_ConnectionRetryPolicy, self).__init__(**clean_kwargs) async def send(self, request): """Sends the PipelineRequest object to the next policy. Uses retry settings if necessary. diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py index bbb77e12e551..1876298c48ab 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py @@ -23,6 +23,7 @@ """ from typing import Any, Dict, List, Optional, Union, Iterable, cast +from azure.core.async_paging import AsyncItemPaged from azure.core.tracing.decorator import distributed_trace # pylint: disable=unused-import from azure.core.tracing.decorator_async import distributed_trace_async # type: ignore @@ -151,9 +152,6 @@ async def read( async def create_item( self, body, # type: Dict[str, Any] - pre_trigger_include=None, # type: Optional[str] - post_trigger_include=None, # type: Optional[str] - indexing_directive=None, # type: Optional[Any] **kwargs # type: Any ): # type: (...) -> Dict[str, Any] @@ -163,9 +161,9 @@ async def create_item( :func:`ContainerProxy.upsert_item` method. :param body: A dict-like object representing the item to create. - :param pre_trigger_include: trigger id to be used as pre operation trigger. - :param post_trigger_include: trigger id to be used as post operation trigger. - :param indexing_directive: Indicate whether the document should be omitted from indexing. + :keyword pre_trigger_include: trigger id to be used as pre operation trigger. + :keyword post_trigger_include: trigger id to be used as post operation trigger. + :keyword indexing_directive: Indicate whether the document should be omitted from indexing. :keyword bool enable_automatic_id_generation: Enable automatic id generation if no id present. :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. @@ -179,6 +177,9 @@ async def create_item( """ request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) + pre_trigger_include = kwargs.pop('pre_trigger_include', None) + post_trigger_include = kwargs.pop('post_trigger_include', None) + indexing_directive = kwargs.pop('indexing_directive', None) request_options["disableAutomaticIdGeneration"] = not kwargs.pop('enable_automatic_id_generation', False) if pre_trigger_include is not None: @@ -277,7 +278,7 @@ async def query_items( populate_query_metrics=None, # type: Optional[bool] **kwargs # type: Any ): - # type: (...) -> Iterable[Dict[str, Any]] + # type: (...) -> AsyncItemPaged[Dict[str, Any]] """Return all results matching the given `query`. 
You can use any value for the container name in the FROM clause, but @@ -356,7 +357,7 @@ def query_items_change_feed( max_item_count=None, # type: Optional[int] **kwargs # type: Any ): - # type: (...) -> Iterable[Dict[str, Any]] + # type: (...) -> AsyncItemPaged[Dict[str, Any]] """Get a sorted list of items that were changed, in the order in which they were modified. :param partition_key_range_id: ChangeFeed requests can be executed against specific partition key ranges. @@ -594,7 +595,7 @@ async def replace_throughput(self, throughput, **kwargs): @distributed_trace def list_conflicts(self, max_item_count=None, **kwargs): - # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + # type: (Optional[int], Any) -> AsyncItemPaged[Dict[str, Any]] """List all the conflicts in the container. :param max_item_count: Max number of items to be returned in the enumeration operation. @@ -623,7 +624,7 @@ async def query_conflicts( max_item_count=None, # type: Optional[int] **kwargs # type: Any ): - # type: (...) -> Iterable[Dict[str, Any]] + # type: (...) -> AsyncItemPaged[Dict[str, Any]] """Return all conflicts matching a given `query`. :param query: The Azure Cosmos DB SQL query to execute. diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py index 4fc56aa7745f..68c8f5042d10 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py @@ -23,6 +23,7 @@ """ from typing import Any, Dict, Optional, Union, cast, Iterable, List +from azure.core.async_paging import AsyncItemPaged from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.tracing.decorator import distributed_trace @@ -30,7 +31,7 @@ from ..cosmos_client import _parse_connection_str, _build_auth from ._cosmos_client_connection_async import CosmosClientConnection from .._base import build_options -from ._retry_utility_async import ConnectionRetryPolicy +from ._retry_utility_async import _ConnectionRetryPolicy from .database import DatabaseProxy from ..documents import ConnectionPolicy, DatabaseAccount from ..exceptions import CosmosResourceNotFoundError @@ -74,7 +75,7 @@ def _build_connection_policy(kwargs): policy.RetryOptions = retry connection_retry = kwargs.pop('connection_retry_policy', None) or policy.ConnectionRetryConfiguration if not connection_retry: - connection_retry = ConnectionRetryPolicy( + connection_retry = _ConnectionRetryPolicy( retry_total=total_retries, retry_connect=kwargs.pop('retry_connect', None), retry_read=kwargs.pop('retry_read', None), @@ -95,7 +96,7 @@ class CosmosClient(object): :param str url: The URL of the Cosmos DB account. :param credential: Can be the account key, or a dictionary of resource tokens. :type credential: str or dict[str, str] - :param str consistency_level: Consistency level to use for the session. The default value is "Session". + :keyword str consistency_level: Consistency level to use for the session. The default value is "Session". .. admonition:: Example: @@ -138,7 +139,7 @@ async def close(self): await self.__aexit__() @classmethod - def from_connection_string(cls, conn_str, credential=None, consistency_level="Session", **kwargs): + def from_connection_string(cls, conn_str, credential=None, **kwargs): # type: (str, Optional[Any], str, Any) -> CosmosClient """Create a CosmosClient instance from a connection string. 
@@ -149,14 +150,14 @@ def from_connection_string(cls, conn_str, credential=None, consistency_level="Se :param credential: Alternative credentials to use instead of the key provided in the connection string. :type credential: str or dict(str, str) - :param str consistency_level: + :keyword str consistency_level: Consistency level to use for the session. The default value is "Session". """ settings = _parse_connection_str(conn_str, credential) return cls( url=settings['AccountEndpoint'], credential=credential or settings['AccountKey'], - consistency_level=consistency_level, + consistency_level=kwargs.get('consistency_level', 'Session') **kwargs ) @@ -274,7 +275,7 @@ def list_databases( max_item_count=None, # type: Optional[int] **kwargs # type: Any ): - # type: (...) -> Iterable[Dict[str, Any]] + # type: (...) -> AsyncItemPaged[Dict[str, Any]] """List the databases in a Cosmos DB SQL database account. :param int max_item_count: Max number of items to be returned in the enumeration operation. @@ -297,12 +298,12 @@ def list_databases( @distributed_trace def query_databases( self, - query=None, # type: Optional[str] + query, # type: str parameters=None, # type: Optional[List[Dict[str, Any]]] max_item_count=None, # type: Optional[int] **kwargs # type: Any ): - # type: (...) -> Iterable[Dict[str, Any]] + # type: (...) -> AsyncItemPaged[Dict[str, Any]] """Query the databases in a Cosmos DB SQL database account. :param str query: The Azure Cosmos DB SQL query to execute. diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py index 147e227a3ecb..1e168bb98d14 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py @@ -25,6 +25,7 @@ from typing import Any, List, Dict, Union, cast, Iterable, Optional import warnings +from azure.core.async_paging import AsyncItemPaged from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.tracing.decorator import distributed_trace @@ -317,7 +318,7 @@ def list_containers( max_item_count=None, **kwargs ): - # type: (Optional[int], Optional[bool], Any) -> Iterable[Dict[str, Any]] + # type: (Optional[int], Optional[bool], Any) -> AsyncItemPaged[Dict[str, Any]] """List the containers in the database. :param max_item_count: Max number of items to be returned in the enumeration operation. @@ -357,7 +358,7 @@ def query_containers( max_item_count=None, # type: Optional[int] **kwargs # type: Any ): - # type: (...) -> Iterable[Dict[str, Any]] + # type: (...) -> AsyncItemPaged[Dict[str, Any]] """List the properties for containers in the current database. :param query: The Azure Cosmos DB SQL query to execute. @@ -538,7 +539,7 @@ def get_user_client(self, user_id): @distributed_trace def list_users(self, max_item_count=None, **kwargs): - # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + # type: (Optional[int], Any) -> AsyncItemPaged[Dict[str, Any]] """List all the users in the container. :param max_item_count: Max number of users to be returned in the enumeration operation. @@ -561,12 +562,12 @@ def list_users(self, max_item_count=None, **kwargs): @distributed_trace def query_users( self, - query=None, # type: Optional[str] + query, # type: str parameters=None, # type: Optional[List[Dict[str, Any]]] max_item_count=None, # type: Optional[int] **kwargs # type: Any ): - # type: (...) -> Iterable[Dict[str, Any]] + # type: (...) -> AsyncItemPaged[Dict[str, Any]] """Return all users matching the given `query`. 
:param query: The Azure Cosmos DB SQL query to execute. diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py index 5263c6d1de3e..aeaa43cc3cff 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py @@ -24,6 +24,9 @@ from typing import Any, List, Dict, Union, Iterable, Optional +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.tracing.decorator import distributed_trace + from azure.cosmos.aio._cosmos_client_connection_async import CosmosClientConnection from .._base import build_options from ..partition_key import NonePartitionKeyValue @@ -57,6 +60,7 @@ def _get_resource_link(self, script_or_id, typ): return u"{}/{}/{}".format(self.container_link, typ, script_or_id) return script_or_id["_self"] + @distributed_trace def list_stored_procedures(self, max_item_count=None, **kwargs): # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] """List all stored procedures in the container. @@ -73,6 +77,7 @@ def list_stored_procedures(self, max_item_count=None, **kwargs): collection_link=self.container_link, options=feed_options, **kwargs ) + @distributed_trace def query_stored_procedures( self, query, @@ -100,6 +105,7 @@ def query_stored_procedures( **kwargs ) + @distributed_trace_async async def read_stored_procedure(self, sproc, **kwargs): # type: (Union[str, Dict[str, Any]], Any) -> Dict[str, Any] """Get the stored procedure identified by `id`. @@ -115,6 +121,7 @@ async def read_stored_procedure(self, sproc, **kwargs): sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), options=request_options, **kwargs ) + @distributed_trace_async async def create_stored_procedure(self, body, **kwargs): # type: (Dict[str, Any], Any) -> Dict[str, Any] """Create a new stored procedure in the container. @@ -132,6 +139,7 @@ async def create_stored_procedure(self, body, **kwargs): collection_link=self.container_link, sproc=body, options=request_options, **kwargs ) + @distributed_trace_async async def replace_stored_procedure( self, sproc, @@ -159,6 +167,7 @@ async def replace_stored_procedure( **kwargs ) + @distributed_trace_async async def delete_stored_procedure(self, sproc, **kwargs): # type: (Union[str, Dict[str, Any]], Any) -> None """Delete a specified stored procedure from the container. @@ -176,6 +185,7 @@ async def delete_stored_procedure(self, sproc, **kwargs): sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), options=request_options, **kwargs ) + @distributed_trace_async async def execute_stored_procedure( self, sproc, # type: Union[str, Dict[str, Any]] @@ -216,6 +226,7 @@ async def execute_stored_procedure( **kwargs ) + @distributed_trace def list_triggers(self, max_item_count=None, **kwargs): # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] """List all triggers in the container. @@ -232,6 +243,7 @@ def list_triggers(self, max_item_count=None, **kwargs): collection_link=self.container_link, options=feed_options, **kwargs ) + @distributed_trace def query_triggers(self, query, parameters=None, max_item_count=None, **kwargs): # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] """Return all triggers matching the given `query`. 
@@ -253,6 +265,7 @@ def query_triggers(self, query, parameters=None, max_item_count=None, **kwargs): **kwargs ) + @distributed_trace_async async def read_trigger(self, trigger, **kwargs): # type: (Union[str, Dict[str, Any]], Any) -> Dict[str, Any] """Get a trigger identified by `id`. @@ -268,6 +281,7 @@ async def read_trigger(self, trigger, **kwargs): trigger_link=self._get_resource_link(trigger, ScriptType.Trigger), options=request_options, **kwargs ) + @distributed_trace_async async def create_trigger(self, body, **kwargs): # type: (Dict[str, Any], Any) -> Dict[str, Any] """Create a trigger in the container. @@ -285,6 +299,7 @@ async def create_trigger(self, body, **kwargs): collection_link=self.container_link, trigger=body, options=request_options, **kwargs ) + @distributed_trace_async async def replace_trigger(self, trigger, body, **kwargs): # type: (Union[str, Dict[str, Any]], Dict[str, Any], Any) -> Dict[str, Any] """Replace a specified tigger in the container. @@ -307,6 +322,7 @@ async def replace_trigger(self, trigger, body, **kwargs): **kwargs ) + @distributed_trace_async async def delete_trigger(self, trigger, **kwargs): # type: (Union[str, Dict[str, Any]], Any) -> None """Delete a specified trigger from the container. @@ -324,6 +340,7 @@ async def delete_trigger(self, trigger, **kwargs): trigger_link=self._get_resource_link(trigger, ScriptType.Trigger), options=request_options, **kwargs ) + @distributed_trace def list_user_defined_functions(self, max_item_count=None, **kwargs): # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] """List all the user-defined functions in the container. @@ -340,6 +357,7 @@ def list_user_defined_functions(self, max_item_count=None, **kwargs): collection_link=self.container_link, options=feed_options, **kwargs ) + @distributed_trace def query_user_defined_functions(self, query, parameters=None, max_item_count=None, **kwargs): # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] """Return user-defined functions matching a given `query`. @@ -361,14 +379,15 @@ def query_user_defined_functions(self, query, parameters=None, max_item_count=No **kwargs ) + @distributed_trace_async async def read_user_defined_function(self, udf, **kwargs): # type: (Union[str, Dict[str, Any]], Any) -> Dict[str, Any] - """Get a user-defined functions identified by `id`. + """Get a user-defined function identified by `id`. :param udf: The ID (name) or dict representing udf to retrieve. :returns: A dict representing the retrieved user-defined function. :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the user-defined function couldn't be retrieved. - :rtype: Iterable[dict[str, Any]] + :rtype: dict[str, Any] """ request_options = build_options(kwargs) @@ -376,6 +395,7 @@ async def read_user_defined_function(self, udf, **kwargs): udf_link=self._get_resource_link(udf, ScriptType.UserDefinedFunction), options=request_options, **kwargs ) + @distributed_trace_async async def create_user_defined_function(self, body, **kwargs): # type: (Dict[str, Any], Any) -> Dict[str, Any] """Create a user-defined function in the container. @@ -393,6 +413,7 @@ async def create_user_defined_function(self, body, **kwargs): collection_link=self.container_link, udf=body, options=request_options, **kwargs ) + @distributed_trace_async async def replace_user_defined_function(self, udf, body, **kwargs): # type: (Union[str, Dict[str, Any]], Dict[str, Any], Any) -> Dict[str, Any] """Replace a specified user-defined function in the container. 
@@ -415,6 +436,7 @@ async def replace_user_defined_function(self, udf, body, **kwargs): **kwargs ) + @distributed_trace_async async def delete_user_defined_function(self, udf, **kwargs): # type: (Union[str, Dict[str, Any]], Any) -> None """Delete a specified user-defined function from the container. From cf20d358c51ff9621050f8b6c46c7e5fc048474c Mon Sep 17 00:00:00 2001 From: simorenoh Date: Thu, 2 Dec 2021 18:52:29 -0500 Subject: [PATCH 39/56] changes based on sync with central sdk --- sdk/cosmos/azure-cosmos/README.md | 2 +- .../azure/cosmos/aio/_query_iterable_async.py | 5 ++ .../azure/cosmos/aio/container.py | 69 +++++++------- .../azure/cosmos/aio/cosmos_client.py | 33 ++++--- .../azure-cosmos/azure/cosmos/aio/database.py | 62 +++++++------ .../azure-cosmos/azure/cosmos/aio/scripts.py | 89 ++++++++++--------- .../azure-cosmos/azure/cosmos/aio/user.py | 27 +++--- ...access_cosmos_with_resource_token_async.py | 2 +- .../samples/container_management_async.py | 2 +- .../samples/document_management_async.py | 2 +- .../azure-cosmos/samples/examples_async.py | 6 +- .../samples/index_management_async.py | 4 +- ...npartitioned_container_operations_async.py | 2 +- 13 files changed, 163 insertions(+), 142 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/README.md b/sdk/cosmos/azure-cosmos/README.md index ab21e13c9aac..f27c2f986424 100644 --- a/sdk/cosmos/azure-cosmos/README.md +++ b/sdk/cosmos/azure-cosmos/README.md @@ -499,7 +499,7 @@ container_name = 'products' container = database.get_container_client(container_name) async def create_lists(): - results = await container.query_items( + results = container.query_items( query='SELECT * FROM products p WHERE p.productModel = "Model 2"') # Iterating directly on results diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_query_iterable_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_query_iterable_async.py index b3a8bafb32b9..bf53ac0d1432 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_query_iterable_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_query_iterable_async.py @@ -77,6 +77,11 @@ def __init__( ) super(QueryIterable, self).__init__(self._fetch_next, self._unpack, continuation_token=continuation_token) + async def __aiter__(self): + if 'partition_key' in self._options: + self._options['partition_key'] = await self._options['partition_key'] + return self + async def _unpack(self, block): continuation = None if self._client.last_response_headers: diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py index 1876298c48ab..6bcad54c5603 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py @@ -22,15 +22,14 @@ """Create, read, update and delete items in the Azure Cosmos DB SQL API service. 
""" -from typing import Any, Dict, List, Optional, Union, Iterable, cast +from typing import Any, Dict, List, Optional, Union, cast from azure.core.async_paging import AsyncItemPaged from azure.core.tracing.decorator import distributed_trace # pylint: disable=unused-import from azure.core.tracing.decorator_async import distributed_trace_async # type: ignore -from typing import Any, Dict, List, Optional, Union, Iterable, cast # pylint: disable=unused-import from ._cosmos_client_connection_async import CosmosClientConnection -from .._base import build_options +from .._base import build_options as _build_options from ..exceptions import CosmosResourceNotFoundError from ..http_constants import StatusCodes from ..offer import Offer @@ -131,7 +130,7 @@ async def read( :returns: Dict representing the retrieved container. :rtype: dict[str, Any] """ - request_options = build_options(kwargs) + request_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if populate_partition_key_range_statistics is not None: request_options["populatePartitionKeyRangeStatistics"] = populate_partition_key_range_statistics @@ -175,7 +174,7 @@ async def create_item( :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: Item with the given ID already exists. :rtype: dict[str, Any] """ - request_options = build_options(kwargs) + request_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) pre_trigger_include = kwargs.pop('pre_trigger_include', None) post_trigger_include = kwargs.pop('post_trigger_include', None) @@ -226,7 +225,7 @@ async def read_item( :name: update_item """ doc_link = self._get_document_link(item) - request_options = build_options(kwargs) + request_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if partition_key is not None: request_options["partitionKey"] = await self._set_partition_key(partition_key) @@ -242,17 +241,17 @@ def read_all_items( max_item_count=None, # type: Optional[int] **kwargs # type: Any ): - # type: (...) -> Iterable[Dict[str, Any]] + # type: (...) -> AsyncItemPaged[Dict[str, Any]] """List all the items in the container. :param max_item_count: Max number of items to be returned in the enumeration operation. :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: An Iterable of items (dicts). - :rtype: Iterable[dict[str, Any]] + :returns: An AsyncItemPaged of items (dicts). + :rtype: AsyncItemPaged[Dict[str, Any]] """ - feed_options = build_options(kwargs) + feed_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count @@ -267,8 +266,8 @@ def read_all_items( response_hook(self.client_connection.last_response_headers, items) return items - @distributed_trace_async - async def query_items( + @distributed_trace + def query_items( self, query, # type: str parameters=None, # type: Optional[List[Dict[str, Any]]] @@ -299,8 +298,8 @@ async def query_items( :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: An Iterable of items (dicts). 
- :rtype: Iterable[dict[str, Any]] + :returns: An AsyncItemPaged of items (dicts). + :rtype: AsyncItemPaged[Dict[str, Any]] .. admonition:: Example: @@ -320,7 +319,7 @@ async def query_items( :caption: Parameterized query to get all products that have been discontinued: :name: query_items_param """ - feed_options = build_options(kwargs) + feed_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count @@ -329,7 +328,7 @@ async def query_items( if enable_scan_in_query is not None: feed_options["enableScanInQuery"] = enable_scan_in_query if partition_key is not None: - feed_options["partitionKey"] = await self._set_partition_key(partition_key) + feed_options["partitionKey"] = self._set_partition_key(partition_key) else: feed_options["enableCrossPartitionQuery"] = True @@ -368,10 +367,10 @@ def query_items_change_feed( :param continuation: e_tag value to be used as continuation for reading change feed. :param max_item_count: Max number of items to be returned in the enumeration operation. :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: An Iterable of items (dicts). - :rtype: Iterable[dict[str, Any]] + :returns: An AsyncItemPaged of items (dicts). + :rtype: AsyncItemPaged[Dict[str, Any]] """ - feed_options = build_options(kwargs) + feed_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if partition_key_range_id is not None: feed_options["partitionKeyRangeId"] = partition_key_range_id @@ -422,7 +421,7 @@ async def upsert_item( :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The given item could not be upserted. :rtype: dict[str, Any] """ - request_options = build_options(kwargs) + request_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) request_options["disableIdGeneration"] = True if pre_trigger_include is not None: @@ -470,7 +469,7 @@ async def replace_item( :rtype: dict[str, Any] """ item_link = self._get_document_link(item) - request_options = build_options(kwargs) + request_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) request_options["disableIdGeneration"] = True if pre_trigger_include is not None: @@ -513,7 +512,7 @@ async def delete_item( :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The item does not exist in the container. :rtype: None """ - request_options = build_options(kwargs) + request_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if partition_key is not None: request_options["partitionKey"] = await self._set_partition_key(partition_key) @@ -528,9 +527,9 @@ async def delete_item( response_hook(self.client_connection.last_response_headers, result) @distributed_trace_async - async def read_throughput(self, **kwargs): + async def read_offer(self, **kwargs): # type: (Any) -> Offer - """Read the throughput offer for this container. + """Read the Offer object for this container. If no Offer already exists for the container, an exception is raised. @@ -600,10 +599,10 @@ def list_conflicts(self, max_item_count=None, **kwargs): :param max_item_count: Max number of items to be returned in the enumeration operation. :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: An Iterable of conflicts (dicts). - :rtype: Iterable[dict[str, Any]] + :returns: An AsyncItemPaged of conflicts (dicts). 
+ :rtype: AsyncItemPaged[Dict[str, Any]] """ - feed_options = build_options(kwargs) + feed_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count @@ -615,8 +614,8 @@ def list_conflicts(self, max_item_count=None, **kwargs): response_hook(self.client_connection.last_response_headers, result) return result - @distributed_trace_async - async def query_conflicts( + @distributed_trace + def query_conflicts( self, query, # type: str parameters=None, # type: Optional[List[Dict[str, Any]]] @@ -633,15 +632,15 @@ async def query_conflicts( cross partition query will be executed. :param max_item_count: Max number of items to be returned in the enumeration operation. :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: An Iterable of conflicts (dicts). - :rtype: Iterable[dict[str, Any]] + :returns: An AsyncItemPaged of conflicts (dicts). + :rtype: AsyncItemPaged[Dict[str, Any]] """ - feed_options = build_options(kwargs) + feed_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count if partition_key is not None: - feed_options["partitionKey"] = await self._set_partition_key(partition_key) + feed_options["partitionKey"] = self._set_partition_key(partition_key) else: feed_options["enableCrossPartitionQuery"] = True @@ -672,7 +671,7 @@ async def read_conflict( :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The given conflict couldn't be retrieved. :rtype: dict[str, Any] """ - request_options = build_options(kwargs) + request_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if partition_key is not None: request_options["partitionKey"] = await self._set_partition_key(partition_key) @@ -703,7 +702,7 @@ async def delete_conflict( :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The conflict does not exist in the container. :rtype: None """ - request_options = build_options(kwargs) + request_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if partition_key is not None: request_options["partitionKey"] = await self._set_partition_key(partition_key) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py index 68c8f5042d10..5f6fe8158a34 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py @@ -22,7 +22,7 @@ """Create, read, and delete databases in the Azure Cosmos DB SQL API service. 
""" -from typing import Any, Dict, Optional, Union, cast, Iterable, List +from typing import Any, Dict, Optional, Union, cast, List from azure.core.async_paging import AsyncItemPaged from azure.core.tracing.decorator_async import distributed_trace_async @@ -30,7 +30,7 @@ from ..cosmos_client import _parse_connection_str, _build_auth from ._cosmos_client_connection_async import CosmosClientConnection -from .._base import build_options +from .._base import build_options as _build_options from ._retry_utility_async import _ConnectionRetryPolicy from .database import DatabaseProxy from ..documents import ConnectionPolicy, DatabaseAccount @@ -207,7 +207,7 @@ async def create_database( # pylint: disable=redefined-builtin :name: create_database """ - request_options = build_options(kwargs) + request_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if offer_throughput is not None: request_options["offerThroughput"] = offer_throughput @@ -257,8 +257,8 @@ async def create_database_if_not_exists( # pylint: disable=redefined-builtin **kwargs ) - def get_database_client(self, database_id): - # type: (str) -> DatabaseProxy + def get_database_client(self, database): + # type: (Union[str, DatabaseProxy, Dict[str, Any]]) -> DatabaseProxy """Retrieve an existing database with the ID (name) `id`. :param database: The ID (name) representing the properties of the database to read. @@ -266,8 +266,15 @@ def get_database_client(self, database_id): :returns: A `DatabaseProxy` instance representing the retrieved database. :rtype: ~azure.cosmos.DatabaseProxy """ + try: + id_value = database.id + except AttributeError: + try: + id_value = database['id'] + except TypeError: + id_value = database - return DatabaseProxy(self.client_connection, database_id) + return DatabaseProxy(self.client_connection, id_value) @distributed_trace def list_databases( @@ -282,10 +289,10 @@ def list_databases( :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: An Iterable of database properties (dicts). - :rtype: Iterable[dict[str, str]] + :returns: An AsyncItemPaged of database properties (dicts). + :rtype: AsyncItemPaged[dict[str, str]] """ - feed_options = build_options(kwargs) + feed_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count @@ -314,10 +321,10 @@ def query_databases( :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: An Iterable of database properties (dicts). - :rtype: Iterable[dict[str, str]] + :returns: An AsyncItemPaged of database properties (dicts). + :rtype: AsyncItemPaged[dict[str, str]] """ - feed_options = build_options(kwargs) + feed_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count @@ -351,7 +358,7 @@ async def delete_database( :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the database couldn't be deleted. 
:rtype: None """ - request_options = build_options(kwargs) + request_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) database_link = self._get_database_link(database) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py index 1e168bb98d14..e628e121a4a0 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py @@ -22,7 +22,7 @@ """Interact with databases in the Azure Cosmos DB SQL API service. """ -from typing import Any, List, Dict, Union, cast, Iterable, Optional +from typing import Any, List, Dict, Union, cast, Optional import warnings from azure.core.async_paging import AsyncItemPaged @@ -30,7 +30,7 @@ from azure.core.tracing.decorator import distributed_trace from ._cosmos_client_connection_async import CosmosClientConnection -from .._base import build_options +from .._base import build_options as _build_options from .container import ContainerProxy from ..offer import Offer from ..http_constants import StatusCodes @@ -130,7 +130,7 @@ async def read(self, **kwargs): from .cosmos_client import CosmosClient database_link = CosmosClient._get_database_link(self) - request_options = build_options(kwargs) + request_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) self._properties = await self.client_connection.ReadDatabase( @@ -218,7 +218,7 @@ async def create_container( if analytical_storage_ttl is not None: definition["analyticalStorageTtl"] = analytical_storage_ttl - request_options = build_options(kwargs) + request_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if offer_throughput is not None: request_options["offerThroughput"] = offer_throughput @@ -291,8 +291,8 @@ async def create_container_if_not_exists( analytical_storage_ttl=analytical_storage_ttl ) - def get_container_client(self, container_id): - # type: (str) -> ContainerProxy + def get_container_client(self, container): + # type: (Union[str, ContainerProxy, Dict[str, Any]]) -> ContainerProxy """Get a `ContainerProxy` for a container with specified ID (name). :param container: The ID (name) of the container to be retrieved. @@ -310,7 +310,15 @@ def get_container_client(self, container_id): :name: get_container """ - return ContainerProxy(self.client_connection, self.database_link, container_id) + try: + id_value = container.id + except AttributeError: + try: + id_value = container['id'] + except TypeError: + id_value = container + + return ContainerProxy(self.client_connection, self.database_link, id_value) @distributed_trace def list_containers( @@ -325,8 +333,8 @@ def list_containers( :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: An Iterable of container properties (dicts). - :rtype: Iterable[dict[str, Any]] + :returns: An AsyncItemPaged of container properties (dicts). + :rtype: AsyncItemPaged[dict[str, Any]] .. 
admonition:: Example: @@ -338,7 +346,7 @@ def list_containers( :caption: List all containers in the database: :name: list_containers """ - feed_options = build_options(kwargs) + feed_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count @@ -369,10 +377,10 @@ def query_containers( :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: An Iterable of container properties (dicts). - :rtype: Iterable[dict[str, Any]] + :returns: An AsyncItemPaged of container properties (dicts). + :rtype: AsyncItemPaged[dict[str, Any]] """ - feed_options = build_options(kwargs) + feed_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count @@ -431,7 +439,7 @@ async def replace_container( :caption: Reset the TTL property on a container, and display the updated properties: :name: reset_container_properties """ - request_options = build_options(kwargs) + request_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) container_id = self._get_container_id(container) @@ -480,7 +488,7 @@ async def delete_container( :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the container couldn't be deleted. :rtype: None """ - request_options = build_options(kwargs) + request_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) collection_link = self._get_container_link(container) @@ -513,7 +521,7 @@ async def create_user(self, body, **kwargs): :caption: Create a database user: :name: create_user """ - request_options = build_options(kwargs) + request_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) user = await self.client_connection.CreateUser( @@ -544,10 +552,10 @@ def list_users(self, max_item_count=None, **kwargs): :param max_item_count: Max number of users to be returned in the enumeration operation. :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: An Iterable of user properties (dicts). - :rtype: Iterable[dict[str, Any]] + :returns: An AsyncItemPaged of user properties (dicts). + :rtype: AsyncItemPaged[dict[str, Any]] """ - feed_options = build_options(kwargs) + feed_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count @@ -576,10 +584,10 @@ def query_users( Ignored if no query is provided. :param max_item_count: Max number of users to be returned in the enumeration operation. :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: An Iterable of user properties (dicts). - :rtype: Iterable[str, Any] + :returns: An AsyncItemPaged of user properties (dicts). + :rtype: AsyncItemPaged[str, Any] """ - feed_options = build_options(kwargs) + feed_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count @@ -608,7 +616,7 @@ async def upsert_user(self, body, **kwargs): :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given user could not be upserted. 
:rtype: ~azure.cosmos.UserProxy """ - request_options = build_options(kwargs) + request_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) user = await self.client_connection.UpsertUser( @@ -641,7 +649,7 @@ async def replace_user( If the replace failed or the user with given ID does not exist. :rtype: ~azure.cosmos.UserProxy """ - request_options = build_options(kwargs) + request_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) replaced_user = await self.client_connection.ReplaceUser( @@ -670,7 +678,7 @@ async def delete_user(self, user, **kwargs): :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The user does not exist in the container. :rtype: None """ - request_options = build_options(kwargs) + request_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) result = await self.client_connection.DeleteUser( @@ -680,9 +688,9 @@ async def delete_user(self, user, **kwargs): response_hook(self.client_connection.last_response_headers, result) @distributed_trace_async - async def read_throughput(self, **kwargs): + async def read_offer(self, **kwargs): # type: (Any) -> Offer - """Read the throughput offer for this database. + """Read the Offer object for this database. :keyword Callable response_hook: A callable invoked with the response metadata. :returns: Offer for the database. diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py index aeaa43cc3cff..1eb6479aa5e5 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py @@ -22,13 +22,14 @@ """Create, read, update and delete and execute scripts in the Azure Cosmos DB SQL API service. """ -from typing import Any, List, Dict, Union, Iterable, Optional +from typing import Any, List, Dict, Union, Optional +from azure.core.async_paging import AsyncItemPaged from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.tracing.decorator import distributed_trace -from azure.cosmos.aio._cosmos_client_connection_async import CosmosClientConnection -from .._base import build_options +from azure.cosmos.aio._cosmos_client_connection_async import CosmosClientConnection as _cosmos_client_connection +from .._base import build_options as _build_options from ..partition_key import NonePartitionKeyValue # pylint: disable=protected-access @@ -62,14 +63,14 @@ def _get_resource_link(self, script_or_id, typ): @distributed_trace def list_stored_procedures(self, max_item_count=None, **kwargs): - # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + # type: (Optional[int], Any) -> AsyncItemPaged[Dict[str, Any]] """List all stored procedures in the container. :param int max_item_count: Max number of items to be returned in the enumeration operation. - :returns: An Iterable of stored procedures (dicts). - :rtype: Iterable[dict[str, Any]] + :returns: An AsyncItemPaged of stored procedures (dicts). + :rtype: AsyncItemPaged[Dict[str, Any]] """ - feed_options = build_options(kwargs) + feed_options = _build_options(kwargs) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count @@ -85,16 +86,16 @@ def query_stored_procedures( max_item_count=None, **kwargs ): - # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] + # type: (str, Optional[List[str]], Optional[int], Any) -> AsyncItemPaged[Dict[str, Any]] """Return all stored procedures matching the given `query`. 
:param query: The Azure Cosmos DB SQL query to execute. :param parameters: Optional array of parameters to the query. Ignored if no query is provided. :param max_item_count: Max number of items to be returned in the enumeration operation. - :returns: An Iterable of stored procedures (dicts). - :rtype: Iterable[dict[str, Any]] + :returns: An AsyncItemPaged of stored procedures (dicts). + :rtype: AsyncItemPaged[Dict[str, Any]] """ - feed_options = build_options(kwargs) + feed_options = _build_options(kwargs) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count @@ -106,7 +107,7 @@ def query_stored_procedures( ) @distributed_trace_async - async def read_stored_procedure(self, sproc, **kwargs): + async def get_stored_procedure(self, sproc, **kwargs): # type: (Union[str, Dict[str, Any]], Any) -> Dict[str, Any] """Get the stored procedure identified by `id`. @@ -115,7 +116,7 @@ async def read_stored_procedure(self, sproc, **kwargs): :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given stored procedure couldn't be retrieved. :rtype: dict[str, Any] """ - request_options = build_options(kwargs) + request_options = _build_options(kwargs) return await self.client_connection.ReadStoredProcedure( sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), options=request_options, **kwargs @@ -133,7 +134,7 @@ async def create_stored_procedure(self, body, **kwargs): :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given stored procedure couldn't be created. :rtype: dict[str, Any] """ - request_options = build_options(kwargs) + request_options = _build_options(kwargs) return await self.client_connection.CreateStoredProcedure( collection_link=self.container_link, sproc=body, options=request_options, **kwargs @@ -158,7 +159,7 @@ async def replace_stored_procedure( procedure with given id does not exist. :rtype: dict[str, Any] """ - request_options = build_options(kwargs) + request_options = _build_options(kwargs) return await self.client_connection.ReplaceStoredProcedure( sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), @@ -179,7 +180,7 @@ async def delete_stored_procedure(self, sproc, **kwargs): :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The sproc does not exist in the container. :rtype: None """ - request_options = build_options(kwargs) + request_options = _build_options(kwargs) await self.client_connection.DeleteStoredProcedure( sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), options=request_options, **kwargs @@ -209,10 +210,10 @@ async def execute_stored_procedure( :rtype: dict[str, Any] """ - request_options = build_options(kwargs) + request_options = _build_options(kwargs) if partition_key is not None: request_options["partitionKey"] = ( - CosmosClientConnection._return_undefined_or_empty_partition_key(self.is_system_key) + _cosmos_client_connection._return_undefined_or_empty_partition_key(self.is_system_key) if partition_key == NonePartitionKeyValue else partition_key ) @@ -228,14 +229,14 @@ async def execute_stored_procedure( @distributed_trace def list_triggers(self, max_item_count=None, **kwargs): - # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + # type: (Optional[int], Any) -> AsyncItemPaged[Dict[str, Any]] """List all triggers in the container. :param max_item_count: Max number of items to be returned in the enumeration operation. - :returns: An Iterable of triggers (dicts). 
- :rtype: Iterable[dict[str, Any]] + :returns: An AsyncItemPaged of triggers (dicts). + :rtype: AsyncItemPaged[Dict[str, Any]] """ - feed_options = build_options(kwargs) + feed_options = _build_options(kwargs) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count @@ -245,16 +246,16 @@ def list_triggers(self, max_item_count=None, **kwargs): @distributed_trace def query_triggers(self, query, parameters=None, max_item_count=None, **kwargs): - # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] + # type: (str, Optional[List[str]], Optional[int], Any) -> AsyncItemPaged[Dict[str, Any]] """Return all triggers matching the given `query`. :param query: The Azure Cosmos DB SQL query to execute. :param parameters: Optional array of parameters to the query. Ignored if no query is provided. :param max_item_count: Max number of items to be returned in the enumeration operation. - :returns: An Iterable of triggers (dicts). - :rtype: Iterable[dict[str, Any]] + :returns: An AsyncItemPaged of triggers (dicts). + :rtype: AsyncItemPaged[Dict[str, Any]] """ - feed_options = build_options(kwargs) + feed_options = _build_options(kwargs) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count @@ -266,7 +267,7 @@ def query_triggers(self, query, parameters=None, max_item_count=None, **kwargs): ) @distributed_trace_async - async def read_trigger(self, trigger, **kwargs): + async def get_trigger(self, trigger, **kwargs): # type: (Union[str, Dict[str, Any]], Any) -> Dict[str, Any] """Get a trigger identified by `id`. @@ -275,7 +276,7 @@ async def read_trigger(self, trigger, **kwargs): :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given trigger couldn't be retrieved. :rtype: dict[str, Any] """ - request_options = build_options(kwargs) + request_options = _build_options(kwargs) return await self.client_connection.ReadTrigger( trigger_link=self._get_resource_link(trigger, ScriptType.Trigger), options=request_options, **kwargs @@ -293,7 +294,7 @@ async def create_trigger(self, body, **kwargs): :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given trigger couldn't be created. :rtype: dict[str, Any] """ - request_options = build_options(kwargs) + request_options = _build_options(kwargs) return await self.client_connection.CreateTrigger( collection_link=self.container_link, trigger=body, options=request_options, **kwargs @@ -313,7 +314,7 @@ async def replace_trigger(self, trigger, body, **kwargs): id does not exist. :rtype: dict[str, Any] """ - request_options = build_options(kwargs) + request_options = _build_options(kwargs) return await self.client_connection.ReplaceTrigger( trigger_link=self._get_resource_link(trigger, ScriptType.Trigger), @@ -334,7 +335,7 @@ async def delete_trigger(self, trigger, **kwargs): :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The trigger does not exist in the container. :rtype: None """ - request_options = build_options(kwargs) + request_options = _build_options(kwargs) await self.client_connection.DeleteTrigger( trigger_link=self._get_resource_link(trigger, ScriptType.Trigger), options=request_options, **kwargs @@ -342,14 +343,14 @@ async def delete_trigger(self, trigger, **kwargs): @distributed_trace def list_user_defined_functions(self, max_item_count=None, **kwargs): - # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + # type: (Optional[int], Any) -> AsyncItemPaged[Dict[str, Any]] """List all the user-defined functions in the container. 
:param max_item_count: Max number of items to be returned in the enumeration operation. - :returns: An Iterable of user-defined functions (dicts). - :rtype: Iterable[dict[str, Any]] + :returns: An AsyncItemPaged of user-defined functions (dicts). + :rtype: AsyncItemPaged[Dict[str, Any]] """ - feed_options = build_options(kwargs) + feed_options = _build_options(kwargs) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count @@ -359,16 +360,16 @@ def list_user_defined_functions(self, max_item_count=None, **kwargs): @distributed_trace def query_user_defined_functions(self, query, parameters=None, max_item_count=None, **kwargs): - # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] + # type: (str, Optional[List[str]], Optional[int], Any) -> AsyncItemPaged[Dict[str, Any]] """Return user-defined functions matching a given `query`. :param query: The Azure Cosmos DB SQL query to execute. :param parameters: Optional array of parameters to the query. Ignored if no query is provided. :param max_item_count: Max number of items to be returned in the enumeration operation. - :returns: An Iterable of user-defined functions (dicts). - :rtype: Iterable[dict[str, Any]] + :returns: An AsyncItemPaged of user-defined functions (dicts). + :rtype: AsyncItemPaged[Dict[str, Any]] """ - feed_options = build_options(kwargs) + feed_options = _build_options(kwargs) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count @@ -380,7 +381,7 @@ def query_user_defined_functions(self, query, parameters=None, max_item_count=No ) @distributed_trace_async - async def read_user_defined_function(self, udf, **kwargs): + async def get_user_defined_function(self, udf, **kwargs): # type: (Union[str, Dict[str, Any]], Any) -> Dict[str, Any] """Get a user-defined function identified by `id`. @@ -389,7 +390,7 @@ async def read_user_defined_function(self, udf, **kwargs): :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the user-defined function couldn't be retrieved. :rtype: dict[str, Any] """ - request_options = build_options(kwargs) + request_options = _build_options(kwargs) return await self.client_connection.ReadUserDefinedFunction( udf_link=self._get_resource_link(udf, ScriptType.UserDefinedFunction), options=request_options, **kwargs @@ -407,7 +408,7 @@ async def create_user_defined_function(self, body, **kwargs): :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the user-defined function couldn't be created. :rtype: dict[str, Any] """ - request_options = build_options(kwargs) + request_options = _build_options(kwargs) return await self.client_connection.CreateUserDefinedFunction( collection_link=self.container_link, udf=body, options=request_options, **kwargs @@ -427,7 +428,7 @@ async def replace_user_defined_function(self, udf, body, **kwargs): with the given id does not exist. :rtype: dict[str, Any] """ - request_options = build_options(kwargs) + request_options = _build_options(kwargs) return await self.client_connection.ReplaceUserDefinedFunction( udf_link=self._get_resource_link(udf, ScriptType.UserDefinedFunction), @@ -448,7 +449,7 @@ async def delete_user_defined_function(self, udf, **kwargs): :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The UDF does not exist in the container. 
:rtype: None """ - request_options = build_options(kwargs) + request_options = _build_options(kwargs) await self.client_connection.DeleteUserDefinedFunction( udf_link=self._get_resource_link(udf, ScriptType.UserDefinedFunction), options=request_options, **kwargs diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py index 0af69bc6ac5d..67b3da2b0831 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py @@ -24,14 +24,15 @@ """Create, read, update and delete users in the Azure Cosmos DB SQL API service. """ -from typing import Any, List, Dict, Union, cast, Iterable, Optional +from typing import Any, List, Dict, Union, cast, Optional +from azure.core.async_paging import AsyncItemPaged from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.tracing.decorator import distributed_trace from ._cosmos_client_connection_async import CosmosClientConnection from .._base import build_options -from ..permission import Permission +from ..permission import Permission as _permission class UserProxy(object): @@ -90,13 +91,13 @@ async def read(self, **kwargs): @distributed_trace def list_permissions(self, max_item_count=None, **kwargs): - # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + # type: (Optional[int], Any) -> AsyncItemPaged[Dict[str, Any]] """List all permission for the user. :param max_item_count: Max number of permissions to be returned in the enumeration operation. :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: An Iterable of permissions (dicts). - :rtype: Iterable[dict[str, Any]] + :returns: An AsyncItemPaged of permissions (dicts). + :rtype: AsyncItemPaged[dict[str, Any]] """ feed_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) @@ -118,15 +119,15 @@ def query_permissions( max_item_count=None, **kwargs ): - # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] + # type: (str, Optional[List[str]], Optional[int], Any) -> AsyncItemPaged[Dict[str, Any]] """Return all permissions matching the given `query`. :param query: The Azure Cosmos DB SQL query to execute. :param parameters: Optional array of parameters to the query. Ignored if no query is provided. :param max_item_count: Max number of permissions to be returned in the enumeration operation. :keyword Callable response_hook: A callable invoked with the response metadata. - :returns: An Iterable of permissions (dicts). - :rtype: Iterable[dict[str, Any]] + :returns: An AsyncItemPaged of permissions (dicts). + :rtype: AsyncItemPaged[dict[str, Any]] """ feed_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) @@ -146,7 +147,7 @@ def query_permissions( return result @distributed_trace_async - async def read_permission(self, permission, **kwargs): + async def get_permission(self, permission, **kwargs): # type: (Union[str, Dict[str, Any], Permission], Any) -> Permission """Get the permission identified by `id`. 
@@ -167,7 +168,7 @@ async def read_permission(self, permission, **kwargs): if response_hook: response_hook(self.client_connection.last_response_headers, permission_resp) - return Permission( + return _permission( id=permission_resp["id"], user_link=self.user_link, permission_mode=permission_resp["permissionMode"], @@ -198,7 +199,7 @@ async def create_permission(self, body, **kwargs): if response_hook: response_hook(self.client_connection.last_response_headers, permission) - return Permission( + return _permission( id=permission["id"], user_link=self.user_link, permission_mode=permission["permissionMode"], @@ -230,7 +231,7 @@ async def upsert_permission(self, body, **kwargs): if response_hook: response_hook(self.client_connection.last_response_headers, permission) - return Permission( + return _permission( id=permission["id"], user_link=self.user_link, permission_mode=permission["permissionMode"], @@ -264,7 +265,7 @@ async def replace_permission(self, permission, body, **kwargs): if response_hook: response_hook(self.client_connection.last_response_headers, permission_resp) - return Permission( + return _permission( id=permission_resp["id"], user_link=self.user_link, permission_mode=permission_resp["permissionMode"], diff --git a/sdk/cosmos/azure-cosmos/samples/access_cosmos_with_resource_token_async.py b/sdk/cosmos/azure-cosmos/samples/access_cosmos_with_resource_token_async.py index 1571f347233c..367b4249b827 100644 --- a/sdk/cosmos/azure-cosmos/samples/access_cosmos_with_resource_token_async.py +++ b/sdk/cosmos/azure-cosmos/samples/access_cosmos_with_resource_token_async.py @@ -102,7 +102,7 @@ async def token_client_delete(container, username, item_id): async def token_client_query(container, username): try: - async for item in await container.query_items( + async for item in container.query_items( query="SELECT * FROM my_container c WHERE c.username=@username", parameters=[{"name": "@username", "value": username}], partition_key=username, diff --git a/sdk/cosmos/azure-cosmos/samples/container_management_async.py b/sdk/cosmos/azure-cosmos/samples/container_management_async.py index 26744f9f592c..96d46124965e 100644 --- a/sdk/cosmos/azure-cosmos/samples/container_management_async.py +++ b/sdk/cosmos/azure-cosmos/samples/container_management_async.py @@ -210,7 +210,7 @@ async def manage_provisioned_throughput(db, id): container = db.get_container_client(id) # now use its _self to query for throughput offers - offer = await container.read_throughput() + offer = await container.read_offer() print('Found Offer \'{0}\' for Container \'{1}\' and its throughput is \'{2}\''.format(offer.properties['id'], container.id, offer.properties['content']['offerThroughput'])) diff --git a/sdk/cosmos/azure-cosmos/samples/document_management_async.py b/sdk/cosmos/azure-cosmos/samples/document_management_async.py index 75511e74098c..77eb9f36d69c 100644 --- a/sdk/cosmos/azure-cosmos/samples/document_management_async.py +++ b/sdk/cosmos/azure-cosmos/samples/document_management_async.py @@ -83,7 +83,7 @@ async def query_items(container, doc_id): # In this case, we do have to await the asynchronous iterator object since logic # within the query_items() method makes network calls to verify the partition key # deifnition in the container - query_items_response = await container.query_items( + query_items_response = container.query_items( query="SELECT * FROM r WHERE r.id=@id", parameters=[ { "name":"@id", "value": doc_id } diff --git a/sdk/cosmos/azure-cosmos/samples/examples_async.py 
b/sdk/cosmos/azure-cosmos/samples/examples_async.py index 90c05378faa2..efeab8375f58 100644 --- a/sdk/cosmos/azure-cosmos/samples/examples_async.py +++ b/sdk/cosmos/azure-cosmos/samples/examples_async.py @@ -89,7 +89,7 @@ async def examples_async(): # [START query_items] import json - async for item in await container.query_items( + async for item in container.query_items( query='SELECT * FROM products p WHERE p.productModel <> "DISCONTINUED"', enable_cross_partition_query=True, ): @@ -99,7 +99,7 @@ async def examples_async(): # Parameterized queries are also supported. This example # gets all items whose product model has been discontinued. # [START query_items_param] - discontinued_items = await container.query_items( + discontinued_items = container.query_items( query='SELECT * FROM products p WHERE p.productModel = @model AND p.productName="Widget"', parameters=[dict(name="@model", value="DISCONTINUED")], ) @@ -112,7 +112,7 @@ async def examples_async(): # so deletes must be done with the delete_item method # on the container. # [START delete_items] - async for item in await container.query_items( + async for item in container.query_items( query='SELECT * FROM products p WHERE p.productModel = "DISCONTINUED" AND p.productName="Widget"' ): await container.delete_item(item, partition_key="Widget") diff --git a/sdk/cosmos/azure-cosmos/samples/index_management_async.py b/sdk/cosmos/azure-cosmos/samples/index_management_async.py index 23650f307649..53fbde79053d 100644 --- a/sdk/cosmos/azure-cosmos/samples/index_management_async.py +++ b/sdk/cosmos/azure-cosmos/samples/index_management_async.py @@ -73,7 +73,7 @@ async def query_entities(parent, entity_type, id = None): if id == None: entities = [entity async for entity in parent.read_all_items()] else: - entities = [entity async for entity in await parent.query_items(find_entity_by_id_query)] + entities = [entity async for entity in parent.query_items(find_entity_by_id_query)] except exceptions.AzureError as e: print("The following error occured while querying for the entity / entities ", entity_type, id if id != None else "") print(e) @@ -114,7 +114,7 @@ async def fetch_all_databases(client): async def query_documents_with_custom_query(container, query_with_optional_parameters, message = "Document(s) found by query: "): try: - results = await container.query_items(query_with_optional_parameters, enable_cross_partition_query=True) + results = container.query_items(query_with_optional_parameters, enable_cross_partition_query=True) print(message) async for doc in results: print(doc) diff --git a/sdk/cosmos/azure-cosmos/samples/nonpartitioned_container_operations_async.py b/sdk/cosmos/azure-cosmos/samples/nonpartitioned_container_operations_async.py index ca9ee7d2abfb..9b24cfe71533 100644 --- a/sdk/cosmos/azure-cosmos/samples/nonpartitioned_container_operations_async.py +++ b/sdk/cosmos/azure-cosmos/samples/nonpartitioned_container_operations_async.py @@ -141,7 +141,7 @@ async def query_items(container, doc_id): print('\n1.4 Querying for an Item by Id\n') # enable_cross_partition_query should be set to True as the container is partitioned - items = [item async for item in await container.query_items( + items = [item async for item in container.query_items( query="SELECT * FROM r WHERE r.id=@id", parameters=[ {"name": "@id", "value": doc_id} From f456817544999bb25929af314c1ca3f15f99828e Mon Sep 17 00:00:00 2001 From: simorenoh Date: Thu, 2 Dec 2021 19:44:15 -0500 Subject: [PATCH 40/56] remove is_system_key from scripts (only used in 
execute_sproc) is_system_key verifies that an empty partition key is properly dealt with if ['partitionKey']['systemKey'] exists in the container options - however, we do not allow containers to be created with empty partition key values in the python sdk, so the functionality is needless --- sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py | 2 +- sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py | 9 ++------- 2 files changed, 3 insertions(+), 8 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py index 6bcad54c5603..4b6bd6ac031e 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py @@ -89,7 +89,7 @@ async def is_system_key(self): def scripts(self): # type: () -> ScriptsProxy if self._scripts is None: - self._scripts = ScriptsProxy(self.client_connection, self.container_link, self.is_system_key) + self._scripts = ScriptsProxy(self.client_connection, self.container_link) return cast('ScriptsProxy', self._scripts) def _get_document_link(self, item_or_link): diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py index 1eb6479aa5e5..9b860e770829 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py @@ -49,11 +49,10 @@ class ScriptsProxy(object): :func:`ContainerProxy.scripts` attribute. """ - def __init__(self, client_connection, container_link, is_system_key): + def __init__(self, client_connection, container_link): # type: (CosmosClientConnection, str, bool) -> None self.client_connection = client_connection self.container_link = container_link - self.is_system_key = is_system_key def _get_resource_link(self, script_or_id, typ): # type: (Union[Dict[str, Any], str], str) -> str @@ -212,11 +211,7 @@ async def execute_stored_procedure( request_options = _build_options(kwargs) if partition_key is not None: - request_options["partitionKey"] = ( - _cosmos_client_connection._return_undefined_or_empty_partition_key(self.is_system_key) - if partition_key == NonePartitionKeyValue - else partition_key - ) + request_options["partitionKey"] = partition_key if enable_script_logging is not None: request_options["enableScriptLogging"] = enable_script_logging From ea9bd16b4141d7034dc46a9e39cf06a3d60da70b Mon Sep 17 00:00:00 2001 From: simorenoh Date: Fri, 3 Dec 2021 10:07:54 -0500 Subject: [PATCH 41/56] Revert "remove is_system_key from scripts (only used in execute_sproc)" Reverting last commit, will find way to init is_system_key for now --- sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py | 2 +- sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py | 9 +++++++-- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py index 4b6bd6ac031e..6bcad54c5603 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py @@ -89,7 +89,7 @@ async def is_system_key(self): def scripts(self): # type: () -> ScriptsProxy if self._scripts is None: - self._scripts = ScriptsProxy(self.client_connection, self.container_link) + self._scripts = ScriptsProxy(self.client_connection, self.container_link, self.is_system_key) return cast('ScriptsProxy', self._scripts) def _get_document_link(self, item_or_link): diff --git 
a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py index 9b860e770829..1eb6479aa5e5 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py @@ -49,10 +49,11 @@ class ScriptsProxy(object): :func:`ContainerProxy.scripts` attribute. """ - def __init__(self, client_connection, container_link): + def __init__(self, client_connection, container_link, is_system_key): # type: (CosmosClientConnection, str, bool) -> None self.client_connection = client_connection self.container_link = container_link + self.is_system_key = is_system_key def _get_resource_link(self, script_or_id, typ): # type: (Union[Dict[str, Any], str], str) -> str @@ -211,7 +212,11 @@ async def execute_stored_procedure( request_options = _build_options(kwargs) if partition_key is not None: - request_options["partitionKey"] = partition_key + request_options["partitionKey"] = ( + _cosmos_client_connection._return_undefined_or_empty_partition_key(self.is_system_key) + if partition_key == NonePartitionKeyValue + else partition_key + ) if enable_script_logging is not None: request_options["enableScriptLogging"] = enable_script_logging From 709d2eb0e6a675c0e6ecf1bd0786286bb35ed294 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Fri, 3 Dec 2021 13:50:02 -0500 Subject: [PATCH 42/56] async script proxy using composition --- sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py | 2 +- sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py index 6bcad54c5603..50090b2361ee 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py @@ -89,7 +89,7 @@ async def is_system_key(self): def scripts(self): # type: () -> ScriptsProxy if self._scripts is None: - self._scripts = ScriptsProxy(self.client_connection, self.container_link, self.is_system_key) + self._scripts = ScriptsProxy(self, self.client_connection, self.container_link) return cast('ScriptsProxy', self._scripts) def _get_document_link(self, item_or_link): diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py index 1eb6479aa5e5..132588a701f8 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py @@ -49,11 +49,11 @@ class ScriptsProxy(object): :func:`ContainerProxy.scripts` attribute. 
""" - def __init__(self, client_connection, container_link, is_system_key): - # type: (CosmosClientConnection, str, bool) -> None + def __init__(self, container, client_connection, container_link): + # type: (ContainerProxy, CosmosClientConnection, str) -> None self.client_connection = client_connection self.container_link = container_link - self.is_system_key = is_system_key + self.container_proxy = container def _get_resource_link(self, script_or_id, typ): # type: (Union[Dict[str, Any], str], str) -> str @@ -213,7 +213,7 @@ async def execute_stored_procedure( request_options = _build_options(kwargs) if partition_key is not None: request_options["partitionKey"] = ( - _cosmos_client_connection._return_undefined_or_empty_partition_key(self.is_system_key) + _cosmos_client_connection._return_undefined_or_empty_partition_key(await self.container_proxy.is_system_key) if partition_key == NonePartitionKeyValue else partition_key ) From 3277dd83177e07e88b1744c11e31f3afaafa8427 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Fri, 3 Dec 2021 17:41:15 -0500 Subject: [PATCH 43/56] pylint --- .../aio/base_execution_context.py | 2 +- .../aio/execution_dispatcher.py | 11 ++++++----- .../aio/multi_execution_aggregator.py | 6 +++--- .../_execution_context/endpoint_component.py | 2 +- .../cosmos/_routing/aio/routing_map_provider.py | 3 ++- .../azure-cosmos/azure/cosmos/aio/__init__.py | 2 +- .../cosmos/aio/_cosmos_client_connection_async.py | 15 ++++++++------- .../cosmos/aio/_global_endpoint_manager_async.py | 2 +- .../azure-cosmos/azure/cosmos/aio/container.py | 7 ++++--- .../azure/cosmos/aio/cosmos_client.py | 8 +++++--- .../azure-cosmos/azure/cosmos/aio/database.py | 2 +- .../azure-cosmos/azure/cosmos/aio/scripts.py | 3 ++- sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py | 3 ++- 13 files changed, 37 insertions(+), 29 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/base_execution_context.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/base_execution_context.py index b8027a611cee..07a4422814a2 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/base_execution_context.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/base_execution_context.py @@ -139,7 +139,7 @@ async def _fetch_items_helper_no_retries(self, fetch_function): async def _fetch_items_helper_with_retries(self, fetch_function): async def callback(): return await self._fetch_items_helper_no_retries(fetch_function) - + return await _retry_utility_async.ExecuteAsync(self._client, self._client._global_endpoint_manager, callback) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/execution_dispatcher.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/execution_dispatcher.py index adc3393da5ac..0a1bd0fa97b9 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/execution_dispatcher.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/execution_dispatcher.py @@ -23,13 +23,14 @@ Cosmos database service. 
""" -from azure.cosmos.exceptions import CosmosHttpResponseError -from azure.cosmos._execution_context.execution_dispatcher import _is_partitioned_execution_info, _get_partitioned_execution_info +from azure.cosmos._execution_context.aio import endpoint_component from azure.cosmos._execution_context.aio import multi_execution_aggregator -from azure.cosmos._execution_context.aio.base_execution_context import _QueryExecutionContextBase, _DefaultQueryExecutionContext +from azure.cosmos._execution_context.aio.base_execution_context import _QueryExecutionContextBase +from azure.cosmos._execution_context.aio.base_execution_context import _DefaultQueryExecutionContext +from azure.cosmos._execution_context.execution_dispatcher import _is_partitioned_execution_info from azure.cosmos._execution_context.query_execution_info import _PartitionedQueryExecutionInfo -from azure.cosmos._execution_context.aio import endpoint_component from azure.cosmos.documents import _DistinctType +from azure.cosmos.exceptions import CosmosHttpResponseError from azure.cosmos.http_constants import StatusCodes # pylint: disable=protected-access @@ -188,4 +189,4 @@ async def fetch_next_block(self): except StopAsyncIteration: # no more results break - return results \ No newline at end of file + return results diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/multi_execution_aggregator.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/multi_execution_aggregator.py index 1758e825ae18..bf3547b8abd3 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/multi_execution_aggregator.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aio/multi_execution_aggregator.py @@ -78,6 +78,8 @@ def __init__(self, client, resource_link, query, options, partitioned_query_ex_i else: self._document_producer_comparator = document_producer._PartitionKeyRangeDocumentProduerComparator() + self._orderByPQ = _MultiExecutionContextAggregator.PriorityQueue() + async def __anext__(self): """Returns the next result @@ -101,7 +103,7 @@ async def __anext__(self): return res raise StopAsyncIteration - def fetch_next_block(self): + async def fetch_next_block(self): raise NotImplementedError("You should use pipeline's fetch_next_block.") @@ -145,8 +147,6 @@ async def _configure_partition_ranges(self): self._createTargetPartitionQueryExecutionContext(partitionTargetRange) ) - self._orderByPQ = _MultiExecutionContextAggregator.PriorityQueue() - for targetQueryExContext in targetPartitionQueryExecutionContextList: try: diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py index 4176451c6532..9a948f5f716d 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py @@ -209,4 +209,4 @@ def __next__(self): return res raise StopIteration - next = __next__ # Python 2 compatibility. \ No newline at end of file + next = __next__ # Python 2 compatibility. 
diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/aio/routing_map_provider.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/aio/routing_map_provider.py index b3dae3ccb515..817531275040 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/aio/routing_map_provider.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/aio/routing_map_provider.py @@ -165,7 +165,8 @@ async def get_overlapping_ranges(self, collection_link, partition_key_ranges): else: queryRange = currentProvidedRange - overlappingRanges = await PartitionKeyRangeCache.get_overlapping_ranges(self, collection_link, queryRange) + overlappingRanges = await PartitionKeyRangeCache.get_overlapping_ranges(self, + collection_link, queryRange) assert overlappingRanges, "code bug: returned overlapping ranges for queryRange {} is empty".format( queryRange ) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/__init__.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/__init__.py index f1b5e4ef3f2a..606b8665cfa2 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/__init__.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/__init__.py @@ -31,4 +31,4 @@ "ContainerProxy", "ScriptsProxy", "UserProxy" -) \ No newline at end of file +) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py index 820cdfc9d203..f6cf37b38a5c 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client_connection_async.py @@ -230,8 +230,9 @@ def _ReadEndpoint(self): return self._global_endpoint_manager.get_read_endpoint() async def _setup(self): - if not 'database_account' in self._setup_kwargs: - self._setup_kwargs['database_account'] = await self._global_endpoint_manager._GetDatabaseAccount(**self._setup_kwargs) + if 'database_account' not in self._setup_kwargs: + self._setup_kwargs['database_account'] = await self._global_endpoint_manager._GetDatabaseAccount( + **self._setup_kwargs) await self._global_endpoint_manager.force_refresh(self._setup_kwargs['database_account']) def _GetDatabaseIdWithPathForUser(self, database_link, user): # pylint: disable=no-self-use @@ -1809,10 +1810,10 @@ async def fetch_fn(options): ) return AsyncItemPaged( - self, - query, - options, - fetch_function=fetch_fn, + self, + query, + options, + fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable ) @@ -2489,4 +2490,4 @@ def __ValidateResource(resource): if id_[-1] == " ": raise ValueError("Id ends with a space.") except AttributeError: - raise_with_traceback(TypeError, message="Id type must be a string.") \ No newline at end of file + raise_with_traceback(TypeError, message="Id type must be a string.") diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py index 78e63b98e528..0a247e4d6529 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_global_endpoint_manager_async.py @@ -165,4 +165,4 @@ def GetLocationalEndpoint(default_endpoint, location_name): ) return locational_endpoint - return None \ No newline at end of file + return None diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py index 50090b2361ee..cbfa52bec300 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py +++ 
b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/container.py @@ -61,6 +61,7 @@ def __init__(self, client_connection, database_link, id, properties=None): # py self.client_connection = client_connection self.id = id self._properties = properties + self.database_link = database_link self.container_link = u"{}/colls/{}".format(database_link, self.id) self._is_system_key = None self._scripts = None # type: Optional[ScriptsProxy] @@ -554,7 +555,7 @@ async def read_offer(self, **kwargs): if response_hook: response_hook(self.client_connection.last_response_headers, offers) - + return Offer(offer_throughput=offers[0]["content"]["offerThroughput"], properties=offers[0]) @distributed_trace_async @@ -628,7 +629,7 @@ def query_conflicts( :param query: The Azure Cosmos DB SQL query to execute. :param parameters: Optional array of parameters to the query. Ignored if no query is provided. - :param partition_key: Specifies the partition key value for the item. If none is passed in, a + :param partition_key: Specifies the partition key value for the item. If none is passed in, a cross partition query will be executed. :param max_item_count: Max number of items to be returned in the enumeration operation. :keyword Callable response_hook: A callable invoked with the response metadata. @@ -711,4 +712,4 @@ async def delete_conflict( conflict_link=self._get_conflict_link(conflict), options=request_options, **kwargs ) if response_hook: - response_hook(self.client_connection.last_response_headers, result) \ No newline at end of file + response_hook(self.client_connection.last_response_headers, result) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py index 5f6fe8158a34..2ada4a8d0388 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/cosmos_client.py @@ -124,7 +124,7 @@ def __init__(self, url, credential, **kwargs): ) def __repr__(self): - # type () -> str + # type: () -> str return "".format(self.client_connection.url_connection)[:1024] async def __aenter__(self): @@ -136,6 +136,8 @@ async def __aexit__(self, *args): return await self.client_connection.pipeline_client.__aexit__(*args) async def close(self): + # type: () -> None + """Close this instance of CosmosClient.""" await self.__aexit__() @classmethod @@ -215,7 +217,7 @@ async def create_database( # pylint: disable=redefined-builtin result = await self.client_connection.CreateDatabase(database=dict(id=id), options=request_options, **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers) - return DatabaseProxy(self.client_connection, id=result["id"], properties=result) + return DatabaseProxy(self.client_connection, id=result["id"], properties=result) @distributed_trace_async async def create_database_if_not_exists( # pylint: disable=redefined-builtin @@ -360,7 +362,7 @@ async def delete_database( """ request_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) - + database_link = self._get_database_link(database) await self.client_connection.DeleteDatabase(database_link, options=request_options, **kwargs) if response_hook: diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py index e628e121a4a0..3cd9ff3899e6 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/database.py @@ -746,4 +746,4 @@ async def replace_throughput(self, 
throughput, **kwargs): data = await self.client_connection.ReplaceOffer(offer_link=offers[0]["_self"], offer=offers[0], **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers, data) - return Offer(offer_throughput=data["content"]["offerThroughput"], properties=data) \ No newline at end of file + return Offer(offer_throughput=data["content"]["offerThroughput"], properties=data) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py index 132588a701f8..61e339410910 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/scripts.py @@ -213,7 +213,8 @@ async def execute_stored_procedure( request_options = _build_options(kwargs) if partition_key is not None: request_options["partitionKey"] = ( - _cosmos_client_connection._return_undefined_or_empty_partition_key(await self.container_proxy.is_system_key) + _cosmos_client_connection._return_undefined_or_empty_partition_key( + await self.container_proxy.is_system_key) if partition_key == NonePartitionKeyValue else partition_key ) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py index 67b3da2b0831..1cdb30433659 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/user.py @@ -82,7 +82,8 @@ async def read(self, **kwargs): request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) - self._properties = await self.client_connection.ReadUser(user_link=self.user_link, options=request_options, **kwargs) + self._properties = await self.client_connection.ReadUser(user_link=self.user_link, + options=request_options, **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers, self._properties) From a57cb4d5659880bc86fde4f0b56cad5cd02381b5 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Mon, 6 Dec 2021 13:12:27 -0500 Subject: [PATCH 44/56] capitalized constants --- sdk/cosmos/azure-cosmos/README.md | 184 +++++++++++++++--------------- 1 file changed, 92 insertions(+), 92 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/README.md b/sdk/cosmos/azure-cosmos/README.md index f27c2f986424..ac33264d6651 100644 --- a/sdk/cosmos/azure-cosmos/README.md +++ b/sdk/cosmos/azure-cosmos/README.md @@ -68,9 +68,9 @@ Once you've populated the `ACCOUNT_URI` and `ACCOUNT_KEY` environment variables, from azure.cosmos import CosmosClient import os -url = os.environ['ACCOUNT_URI'] -key = os.environ['ACCOUNT_KEY'] -client = CosmosClient(url, credential=key) +URL = os.environ['ACCOUNT_URI'] +KEY = os.environ['ACCOUNT_KEY'] +client = CosmosClient(URL, credential=KEY) ``` ## Key concepts @@ -190,14 +190,14 @@ After authenticating your [CosmosClient][ref_cosmosclient], you can work with an from azure.cosmos import CosmosClient, exceptions import os -url = os.environ['ACCOUNT_URI'] -key = os.environ['ACCOUNT_KEY'] -client = CosmosClient(url, credential=key) -database_name = 'testDatabase' +URL = os.environ['ACCOUNT_URI'] +KEY = os.environ['ACCOUNT_KEY'] +client = CosmosClient(URL, credential=KEY) +DATABASE_NAME = 'testDatabase' try: - database = client.create_database(database_name) + database = client.create_database(DATABASE_NAME) except exceptions.CosmosResourceExistsError: - database = client.get_database_client(database_name) + database = client.get_database_client(DATABASE_NAME) ``` ### Create a container @@ -208,17 +208,17 @@ This example creates a container 
with default settings. If a container with the from azure.cosmos import CosmosClient, PartitionKey, exceptions import os -url = os.environ['ACCOUNT_URI'] -key = os.environ['ACCOUNT_KEY'] -client = CosmosClient(url, credential=key) -database_name = 'testDatabase' -database = client.get_database_client(database_name) -container_name = 'products' +URL = os.environ['ACCOUNT_URI'] +KEY = os.environ['ACCOUNT_KEY'] +client = CosmosClient(URL, credential=KEY) +DATABASE_NAME = 'testDatabase' +database = client.get_database_client(DATABASE_NAME) +CONTAINER_NAME = 'products' try: - container = database.create_container(id=container_name, partition_key=PartitionKey(path="/productName")) + container = database.create_container(id=CONTAINER_NAME, partition_key=PartitionKey(path="/productName")) except exceptions.CosmosResourceExistsError: - container = database.get_container_client(container_name) + container = database.get_container_client(CONTAINER_NAME) except exceptions.CosmosHttpResponseError: raise ``` @@ -235,11 +235,11 @@ The options for analytical_storage_ttl are: ```Python -container_name = 'products' +CONTAINER_NAME = 'products' try: - container = database.create_container(id=container_name, partition_key=PartitionKey(path="/productName"),analytical_storage_ttl=-1) + container = database.create_container(id=CONTAINER_NAME, partition_key=PartitionKey(path="/productName"),analytical_storage_ttl=-1) except exceptions.CosmosResourceExistsError: - container = database.get_container_client(container_name) + container = database.get_container_client(CONTAINER_NAME) except exceptions.CosmosHttpResponseError: raise ``` @@ -254,13 +254,13 @@ Retrieve an existing container from the database: from azure.cosmos import CosmosClient import os -url = os.environ['ACCOUNT_URI'] -key = os.environ['ACCOUNT_KEY'] -client = CosmosClient(url, credential=key) -database_name = 'testDatabase' -database = client.get_database_client(database_name) -container_name = 'products' -container = database.get_container_client(container_name) +URL = os.environ['ACCOUNT_URI'] +KEY = os.environ['ACCOUNT_KEY'] +client = CosmosClient(URL, credential=KEY) +DATABASE_NAME = 'testDatabase' +database = client.get_database_client(DATABASE_NAME) +CONTAINER_NAME = 'products' +container = database.get_container_client(CONTAINER_NAME) ``` ### Insert data @@ -273,13 +273,13 @@ This example inserts several items into the container, each with a unique `id`: from azure.cosmos import CosmosClient import os -url = os.environ['ACCOUNT_URI'] -key = os.environ['ACCOUNT_KEY'] -client = CosmosClient(url, credential=key) -database_name = 'testDatabase' -database = client.get_database_client(database_name) -container_name = 'products' -container = database.get_container_client(container_name) +URL = os.environ['ACCOUNT_URI'] +KEY = os.environ['ACCOUNT_KEY'] +client = CosmosClient(URL, credential=KEY) +DATABASE_NAME = 'testDatabase' +database = client.get_database_client(DATABASE_NAME) +CONTAINER_NAME = 'products' +container = database.get_container_client(CONTAINER_NAME) for i in range(1, 10): container.upsert_item({ @@ -298,13 +298,13 @@ To delete items from a container, use [ContainerProxy.delete_item][ref_container from azure.cosmos import CosmosClient import os -url = os.environ['ACCOUNT_URI'] -key = os.environ['ACCOUNT_KEY'] -client = CosmosClient(url, credential=key) -database_name = 'testDatabase' -database = client.get_database_client(database_name) -container_name = 'products' -container = database.get_container_client(container_name) +URL = 
os.environ['ACCOUNT_URI'] +KEY = os.environ['ACCOUNT_KEY'] +client = CosmosClient(URL, credential=KEY) +DATABASE_NAME = 'testDatabase' +database = client.get_database_client(DATABASE_NAME) +CONTAINER_NAME = 'products' +container = database.get_container_client(CONTAINER_NAME) for item in container.query_items( query='SELECT * FROM products p WHERE p.productModel = "Model 2"', @@ -324,13 +324,13 @@ This example queries a container for items with a specific `id`: from azure.cosmos import CosmosClient import os -url = os.environ['ACCOUNT_URI'] -key = os.environ['ACCOUNT_KEY'] -client = CosmosClient(url, credential=key) -database_name = 'testDatabase' -database = client.get_database_client(database_name) -container_name = 'products' -container = database.get_container_client(container_name) +URL = os.environ['ACCOUNT_URI'] +KEY = os.environ['ACCOUNT_KEY'] +client = CosmosClient(URL, credential=KEY) +DATABASE_NAME = 'testDatabase' +database = client.get_database_client(DATABASE_NAME) +CONTAINER_NAME = 'products' +container = database.get_container_client(CONTAINER_NAME) # Enumerate the returned items import json @@ -367,11 +367,11 @@ from azure.cosmos import CosmosClient import os import json -url = os.environ['ACCOUNT_URI'] -key = os.environ['ACCOUNT_KEY'] -client = CosmosClient(url, credential=key) -database_name = 'testDatabase' -database = client.get_database_client(database_name) +URL = os.environ['ACCOUNT_URI'] +KEY = os.environ['ACCOUNT_KEY'] +client = CosmosClient(URL, credential=KEY) +DATABASE_NAME = 'testDatabase' +database = client.get_database_client(DATABASE_NAME) properties = database.read() print(json.dumps(properties)) ``` @@ -385,19 +385,19 @@ from azure.cosmos import CosmosClient import os import json -url = os.environ['ACCOUNT_URI'] -key = os.environ['ACCOUNT_KEY'] -client = CosmosClient(url, credential=key) +URL = os.environ['ACCOUNT_URI'] +KEY = os.environ['ACCOUNT_KEY'] +client = CosmosClient(URL, credential=KEY) # Database -database_name = 'testDatabase' -database = client.get_database_client(database_name) +DATABASE_NAME = 'testDatabase' +database = client.get_database_client(DATABASE_NAME) db_offer = database.read_offer() print('Found Offer \'{0}\' for Database \'{1}\' and its throughput is \'{2}\''.format(db_offer.properties['id'], database.id, db_offer.properties['content']['offerThroughput'])) # Container with dedicated throughput only. 
Will return error "offer not found" for containers without dedicated throughput -container_name = 'testContainer' -container = database.get_container_client(container_name) +CONTAINER_NAME = 'testContainer' +container = database.get_container_client(CONTAINER_NAME) container_offer = container.read_offer() print('Found Offer \'{0}\' for Container \'{1}\' and its throughput is \'{2}\''.format(container_offer.properties['id'], container.id, container_offer.properties['content']['offerThroughput'])) ``` @@ -412,13 +412,13 @@ from azure.cosmos import CosmosClient, PartitionKey import os import json -url = os.environ['ACCOUNT_URI'] -key = os.environ['ACCOUNT_KEY'] -client = CosmosClient(url, credential=key) -database_name = 'testDatabase' -database = client.get_database_client(database_name) -container_name = 'products' -container = database.get_container_client(container_name) +URL = os.environ['ACCOUNT_URI'] +KEY = os.environ['ACCOUNT_KEY'] +client = CosmosClient(URL, credential=KEY) +DATABASE_NAME = 'testDatabase' +database = client.get_database_client(DATABASE_NAME) +CONTAINER_NAME = 'products' +container = database.get_container_client(CONTAINER_NAME) database.replace_container( container, @@ -440,13 +440,13 @@ The asynchronous cosmos client looks and works in a very similar fashion to the from azure.cosmos.aio import CosmosClient import os -url = os.environ['ACCOUNT_URI'] -key = os.environ['ACCOUNT_KEY'] -client = CosmosClient(url, credential=key) -database_name = 'testDatabase' -database = client.get_database_client(database_name) -container_name = 'products' -container = database.get_container_client(container_name) +URL = os.environ['ACCOUNT_URI'] +KEY = os.environ['ACCOUNT_KEY'] +client = CosmosClient(URL, credential=KEY) +DATABASE_NAME = 'testDatabase' +database = client.get_database_client(DATABASE_NAME) +CONTAINER_NAME = 'products' +container = database.get_container_client(CONTAINER_NAME) async def create_items(): for i in range(1, 10): @@ -465,14 +465,14 @@ It is also worth pointing out that the asynchronous client has to be closed manu from azure.cosmos.aio import CosmosClient import os -url = os.environ['ACCOUNT_URI'] -key = os.environ['ACCOUNT_KEY'] -database_name = 'testDatabase' -container_name = 'products' +URL = os.environ['ACCOUNT_URI'] +KEY = os.environ['ACCOUNT_KEY'] +DATABASE_NAME = 'testDatabase' +CONTAINER_NAME = 'products' -async with CosmosClient(url, credential=key) as client: - database = client.get_database_client(database_name) - container = database.get_container_client(container_name) +async with CosmosClient(URL, credential=KEY) as client: + database = client.get_database_client(DATABASE_NAME) + container = database.get_container_client(CONTAINER_NAME) for i in range(1, 10): await container.upsert_item({ 'id': 'item{0}'.format(i), @@ -484,19 +484,19 @@ async with CosmosClient(url, credential=key) as client: ### Queries with the asynchronous client -Queries work the same way for the most part, with one exception being the absence of the `enable_cross_partition` flag in the request; queries without a specified partition key value will now by default atempt to do a cross partition query. 
Results can be directly iterated on, but because queries made by the asynchronous client return AsyncIterable objects, results can't be cast into lists directly; instead, if you need to create lists from your results, use Python's list comprehension to populate a list: +Queries work the same way for the most part, with one exception being the absence of the `enable_cross_partition` flag in the request; queries without a specified partition key value will now by default attempt to do a cross partition query. Results can be directly iterated on, but because queries made by the asynchronous client return AsyncIterable objects, results can't be cast into lists directly; instead, if you need to create lists from your results, use Python's list comprehension to populate a list: ```Python from azure.cosmos.aio import CosmosClient import os -url = os.environ['ACCOUNT_URI'] -key = os.environ['ACCOUNT_KEY'] -client = CosmosClient(url, credential=key) -database_name = 'testDatabase' -database = client.get_database_client(database_name) -container_name = 'products' -container = database.get_container_client(container_name) +URL = os.environ['ACCOUNT_URI'] +KEY = os.environ['ACCOUNT_KEY'] +client = CosmosClient(URL, credential=KEY) +DATABASE_NAME = 'testDatabase' +database = client.get_database_client(DATABASE_NAME) +CONTAINER_NAME = 'products' +container = database.get_container_client(CONTAINER_NAME) async def create_lists(): results = container.query_items( @@ -522,7 +522,7 @@ For example, if you try to create a container using an ID (name) that's already ```Python try: - database.create_container(id=container_name, partition_key=PartitionKey(path="/productName")) + database.create_container(id=CONTAINER_NAME, partition_key=PartitionKey(path="/productName")) except exceptions.CosmosResourceExistsError: print("""Error creating container HTTP status code 409: The ID (name) provided for the container is already in use. 
@@ -552,13 +552,13 @@ handler = logging.StreamHandler(stream=sys.stdout) logger.addHandler(handler) # This client will log detailed information about its HTTP sessions, at DEBUG level -client = CosmosClient(url, credential=key, logging_enable=True) +client = CosmosClient(URL, credential=KEY, logging_enable=True) ``` Similarly, `logging_enable` can enable detailed logging for a single operation, even when it isn't enabled for the client: ```py -database = client.create_database(database_name, logging_enable=True) +database = client.create_database(DATABASE_NAME, logging_enable=True) ``` ## Next steps From 014578b47ade0ee902106c1587fcb51e691c220b Mon Sep 17 00:00:00 2001 From: Simon Moreno <30335873+simorenoh@users.noreply.github.com> Date: Mon, 6 Dec 2021 16:24:28 -0500 Subject: [PATCH 45/56] Apply suggestions from code review Clarifying comments for README Co-authored-by: Gahl Levy <75269480+gahl-levy@users.noreply.github.com> --- sdk/cosmos/azure-cosmos/README.md | 26 ++++++++++++++++---------- 1 file changed, 16 insertions(+), 10 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/README.md b/sdk/cosmos/azure-cosmos/README.md index ac33264d6651..db904f7d1022 100644 --- a/sdk/cosmos/azure-cosmos/README.md +++ b/sdk/cosmos/azure-cosmos/README.md @@ -434,7 +434,7 @@ For more information on TTL, see [Time to Live for Azure Cosmos DB data][cosmos_ ### Using the asynchronous client -The asynchronous cosmos client looks and works in a very similar fashion to the already existing client, with the exception of its package within the sdk and the need of using async/await keywords in order to interact with it. +The asynchronous cosmos client is a separate client that looks and works in a similar fashion to the existing synchronous client. However, the async client needs to be imported separately and its methods need to be used with the async/await keywords. ```Python from azure.cosmos.aio import CosmosClient @@ -449,14 +449,14 @@ CONTAINER_NAME = 'products' container = database.get_container_client(CONTAINER_NAME) async def create_items(): - for i in range(1, 10): + for i in range(10): await container.upsert_item({ 'id': 'item{0}'.format(i), 'productName': 'Widget', 'productModel': 'Model {0}'.format(i) } ) - await client.close() + await client.close() # the async client must be closed manually if it's not initialized in a with statement ``` It is also worth pointing out that the asynchronous client has to be closed manually after its use, either by initializing it using async with or calling the close() method directly like shown above. @@ -470,10 +470,10 @@ KEY = os.environ['ACCOUNT_KEY'] DATABASE_NAME = 'testDatabase' CONTAINER_NAME = 'products' -async with CosmosClient(URL, credential=KEY) as client: +async with CosmosClient(URL, credential=KEY) as client: # the with statement will automatically close the async client database = client.get_database_client(DATABASE_NAME) container = database.get_container_client(CONTAINER_NAME) - for i in range(1, 10): + for i in range(10): await container.upsert_item({ 'id': 'item{0}'.format(i), 'productName': 'Widget', @@ -484,7 +484,11 @@ async with CosmosClient(URL, credential=KEY) as client: ### Queries with the asynchronous client -Queries work the same way for the most part, with one exception being the absence of the `enable_cross_partition` flag in the request; queries without a specified partition key value will now by default attempt to do a cross partition query. 
Results can be directly iterated on, but because queries made by the asynchronous client return AsyncIterable objects, results can't be cast into lists directly; instead, if you need to create lists from your results, use Python's list comprehension to populate a list: +Unlike the synchronous client, the async client does not have an `enable_cross_partition` flag in the request. Queries without a specified partition key value will attempt to do a cross partition query by default. + +Query results can be iterated, but query results return an asynchronous iterator. This means that each object from the iterator is already being "awaited" and does not contain that actual query result, only an awaitable object to asynchronously retrieve it. + +These results can't be cast into lists directly; instead, if you need to create lists from your results, use an async for loop or Python's list comprehension to populate a list: ```Python from azure.cosmos.aio import CosmosClient @@ -502,13 +506,15 @@ async def create_lists(): results = container.query_items( query='SELECT * FROM products p WHERE p.productModel = "Model 2"') - # Iterating directly on results + # iterates on "results" iterator to asynchronously create a complete list of the actual query results + + item_list = [] async for item in results: - print(item) + item_list.append(item) - # Making a list from the results + # Asynchronously creates a complete list of the actual query results. This code performs the same action as the for-loop example above. item_list = [item async for item in results] -``` + await client.close() ## Troubleshooting From 0d7969524a49bf217ee725a958358acf45ccf6dc Mon Sep 17 00:00:00 2001 From: simorenoh Date: Mon, 6 Dec 2021 17:11:03 -0500 Subject: [PATCH 46/56] closing python code snippet --- sdk/cosmos/azure-cosmos/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/cosmos/azure-cosmos/README.md b/sdk/cosmos/azure-cosmos/README.md index db904f7d1022..1170557ccb88 100644 --- a/sdk/cosmos/azure-cosmos/README.md +++ b/sdk/cosmos/azure-cosmos/README.md @@ -515,7 +515,7 @@ async def create_lists(): # Asynchronously creates a complete list of the actual query results. This code performs the same action as the for-loop example above. item_list = [item async for item in results] await client.close() - +``` ## Troubleshooting ### General From fdabea1720e1bacf6af61cea18fd53893a97f71c Mon Sep 17 00:00:00 2001 From: Simon Moreno <30335873+simorenoh@users.noreply.github.com> Date: Tue, 7 Dec 2021 12:41:49 -0500 Subject: [PATCH 47/56] last doc updates --- sdk/cosmos/azure-cosmos/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/README.md b/sdk/cosmos/azure-cosmos/README.md index 1170557ccb88..b5d56708fe21 100644 --- a/sdk/cosmos/azure-cosmos/README.md +++ b/sdk/cosmos/azure-cosmos/README.md @@ -486,9 +486,9 @@ async with CosmosClient(URL, credential=KEY) as client: # the with statement wil Unlike the synchronous client, the async client does not have an `enable_cross_partition` flag in the request. Queries without a specified partition key value will attempt to do a cross partition query by default. -Query results can be iterated, but query results return an asynchronous iterator. This means that each object from the iterator is already being "awaited" and does not contain that actual query result, only an awaitable object to asynchronously retrieve it. +Query results can be iterated, but the query's raw output returns an asynchronous iterator. 
This means that each object from the iterator is an awaitable object, and does not yet contain the true query result. In order to obtain the query results you can use an async for loop, which awaits each result as you iterate on the object, or manually await each query result as you iterate over the asynchronous iterator. -These results can't be cast into lists directly; instead, if you need to create lists from your results, use an async for loop or Python's list comprehension to populate a list: +Since the query results are an asynchronous iterator, they can't be cast into lists directly; instead, if you need to create lists from your results, use an async for loop or Python's list comprehension to populate a list: ```Python from azure.cosmos.aio import CosmosClient From 016d0dd11057360b0e80d92ca6778144ea61941c Mon Sep 17 00:00:00 2001 From: Travis Prescott Date: Tue, 7 Dec 2021 12:10:31 -0600 Subject: [PATCH 48/56] Update sdk/cosmos/azure-cosmos/CHANGELOG.md Co-authored-by: Simon Moreno <30335873+simorenoh@users.noreply.github.com> --- sdk/cosmos/azure-cosmos/CHANGELOG.md | 1 - 1 file changed, 1 deletion(-) diff --git a/sdk/cosmos/azure-cosmos/CHANGELOG.md b/sdk/cosmos/azure-cosmos/CHANGELOG.md index 14df62dece1c..3ff54d723364 100644 --- a/sdk/cosmos/azure-cosmos/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos/CHANGELOG.md @@ -2,7 +2,6 @@ **New features** - Added language native async i/o client -## 4.2.1 (Unreleased) ## 4.2.0 (2020-10-08) From 8228aa9cc5b0551a5779b18d8741325615e2f1ac Mon Sep 17 00:00:00 2001 From: simorenoh Date: Tue, 7 Dec 2021 14:59:53 -0500 Subject: [PATCH 49/56] version update --- sdk/cosmos/azure-cosmos/CHANGELOG.md | 3 +-- sdk/cosmos/azure-cosmos/azure/cosmos/_version.py | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/CHANGELOG.md b/sdk/cosmos/azure-cosmos/CHANGELOG.md index 3ff54d723364..3535f5f503c5 100644 --- a/sdk/cosmos/azure-cosmos/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos/CHANGELOG.md @@ -1,8 +1,7 @@ -## 4.3.0 (Unreleased) +## 4.3.0b1 (Unreleased) **New features** - Added language native async i/o client - ## 4.2.0 (2020-10-08) **Bug fixes** diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_version.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_version.py index c44af2abbf2d..402cc1f5a640 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_version.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_version.py @@ -19,4 +19,4 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -VERSION = "4.2.1" +VERSION = "4.3.0b1" From 7e8e95322883052059c31d719f26f90119edb8d1 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Mon, 13 Dec 2021 15:41:42 -0500 Subject: [PATCH 50/56] cosmos updates for release --- sdk/cosmos/azure-cosmos/CHANGELOG.md | 10 ++++++---- sdk/cosmos/azure-cosmos/setup.py | 2 +- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/CHANGELOG.md b/sdk/cosmos/azure-cosmos/CHANGELOG.md index 3535f5f503c5..c1e1adb78610 100644 --- a/sdk/cosmos/azure-cosmos/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos/CHANGELOG.md @@ -1,8 +1,10 @@ -## 4.3.0b1 (Unreleased) +## Release History + +### 4.3.0b1 (2021-12-14) **New features** - Added language native async i/o client -## 4.2.0 (2020-10-08) +### 4.2.0 (2020-10-08) **Bug fixes** - Fixed bug where continuation token is not honored when query_iterable is used to get results by page. Issue #13265. @@ -11,7 +13,7 @@ **New features** - Added support for passing partitionKey while querying changefeed. Issue #11689. 
-## 4.1.0 (2020-08-10) +### 4.1.0 (2020-08-10) - Added deprecation warning for "lazy" indexing mode. The backend no longer allows creating containers with this mode and will set them to consistent instead. @@ -26,7 +28,7 @@ - Fixed error raised when a non string ID is used in an item. It now raises TypeError rather than AttributeError. Issue #11793 - thank you @Rabbit994. -## 4.0.0 (2020-05-20) +### 4.0.0 (2020-05-20) - Stable release. - Added HttpLoggingPolicy to pipeline to enable passing in a custom logger for request and response headers. diff --git a/sdk/cosmos/azure-cosmos/setup.py b/sdk/cosmos/azure-cosmos/setup.py index a2da46395b8c..65e30599f0e3 100644 --- a/sdk/cosmos/azure-cosmos/setup.py +++ b/sdk/cosmos/azure-cosmos/setup.py @@ -43,7 +43,7 @@ maintainer_email="askdocdb@microsoft.com", url="https://github.com/Azure/azure-sdk-for-python", classifiers=[ - "Development Status :: 5 - Production/Stable", + "Development Status :: 4 - Beta", "Intended Audience :: Developers", "Natural Language :: English", "Operating System :: OS Independent", From 77c139a5eb0b38766efca01e1404878e5129c092 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Mon, 27 Dec 2021 16:02:34 -0500 Subject: [PATCH 51/56] send user warning for use of populate_query_metrics flag --- .../azure-cosmos/azure/cosmos/container.py | 36 +++++++++++++++---- .../azure/cosmos/cosmos_client.py | 21 ++++++++--- .../azure-cosmos/azure/cosmos/database.py | 29 ++++++++++++--- 3 files changed, 70 insertions(+), 16 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py index 06b05b34404f..83206dc58029 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py @@ -25,6 +25,7 @@ from typing import Any, Dict, List, Optional, Union, Iterable, cast # pylint: disable=unused-import import six +import warnings from azure.core.tracing.decorator import distributed_trace # type: ignore from ._cosmos_client_connection import CosmosClientConnection @@ -120,7 +121,6 @@ def read( # type: (...) -> Dict[str, Any] """Read the container properties. - :param populate_query_metrics: Enable returning query metrics in response headers. :param populate_partition_key_range_statistics: Enable returning partition key range statistics in response headers. :param populate_quota_info: Enable returning collection storage quota information in response headers. @@ -135,6 +135,10 @@ def read( request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if populate_query_metrics is not None: + warnings.warn( + "the populate_query_metrics flag does not apply to this method and will be removed in the future", + UserWarning, + ) request_options["populateQueryMetrics"] = populate_query_metrics if populate_partition_key_range_statistics is not None: request_options["populatePartitionKeyRangeStatistics"] = populate_partition_key_range_statistics @@ -165,7 +169,6 @@ def read_item( :param item: The ID (name) or dict representing item to retrieve. :param partition_key: Partition key for the item to retrieve. - :param populate_query_metrics: Enable returning query metrics in response headers. :param post_trigger_include: trigger id to be used as post operation trigger. :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. 
@@ -191,6 +194,10 @@ def read_item( if partition_key is not None: request_options["partitionKey"] = self._set_partition_key(partition_key) if populate_query_metrics is not None: + warnings.warn( + "the populate_query_metrics flag does not apply to this method and will be removed in the future", + UserWarning, + ) request_options["populateQueryMetrics"] = populate_query_metrics if post_trigger_include is not None: request_options["postTriggerInclude"] = post_trigger_include @@ -211,7 +218,6 @@ def read_all_items( """List all the items in the container. :param max_item_count: Max number of items to be returned in the enumeration operation. - :param populate_query_metrics: Enable returning query metrics in response headers. :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. :keyword Callable response_hook: A callable invoked with the response metadata. @@ -223,6 +229,10 @@ def read_all_items( if max_item_count is not None: feed_options["maxItemCount"] = max_item_count if populate_query_metrics is not None: + warnings.warn( + "the populate_query_metrics flag does not apply to this method and will be removed in the future", + UserWarning, + ) feed_options["populateQueryMetrics"] = populate_query_metrics if hasattr(response_hook, "clear"): @@ -383,7 +393,6 @@ def replace_item( :param item: The ID (name) or dict representing item to be replaced. :param body: A dict-like object representing the item to replace. - :param populate_query_metrics: Enable returning query metrics in response headers. :param pre_trigger_include: trigger id to be used as pre operation trigger. :param post_trigger_include: trigger id to be used as post operation trigger. :keyword str session_token: Token for use with Session consistency. @@ -402,6 +411,10 @@ def replace_item( response_hook = kwargs.pop('response_hook', None) request_options["disableIdGeneration"] = True if populate_query_metrics is not None: + warnings.warn( + "the populate_query_metrics flag does not apply to this method and will be removed in the future", + UserWarning, + ) request_options["populateQueryMetrics"] = populate_query_metrics if pre_trigger_include is not None: request_options["preTriggerInclude"] = pre_trigger_include @@ -431,7 +444,6 @@ def upsert_item( does not already exist, it is inserted. :param body: A dict-like object representing the item to update or insert. - :param populate_query_metrics: Enable returning query metrics in response headers. :param pre_trigger_include: trigger id to be used as pre operation trigger. :param post_trigger_include: trigger id to be used as post operation trigger. :keyword str session_token: Token for use with Session consistency. @@ -448,6 +460,10 @@ def upsert_item( response_hook = kwargs.pop('response_hook', None) request_options["disableIdGeneration"] = True if populate_query_metrics is not None: + warnings.warn( + "the populate_query_metrics flag does not apply to this method and will be removed in the future", + UserWarning, + ) request_options["populateQueryMetrics"] = populate_query_metrics if pre_trigger_include is not None: request_options["preTriggerInclude"] = pre_trigger_include @@ -481,7 +497,6 @@ def create_item( :func:`ContainerProxy.upsert_item` method. :param body: A dict-like object representing the item to create. - :param populate_query_metrics: Enable returning query metrics in response headers. :param pre_trigger_include: trigger id to be used as pre operation trigger. 
:param post_trigger_include: trigger id to be used as post operation trigger. :param indexing_directive: Indicate whether the document should be omitted from indexing. @@ -501,6 +516,10 @@ def create_item( request_options["disableAutomaticIdGeneration"] = not kwargs.pop('enable_automatic_id_generation', False) if populate_query_metrics: + warnings.warn( + "the populate_query_metrics flag does not apply to this method and will be removed in the future", + UserWarning, + ) request_options["populateQueryMetrics"] = populate_query_metrics if pre_trigger_include is not None: request_options["preTriggerInclude"] = pre_trigger_include @@ -533,7 +552,6 @@ def delete_item( :param item: The ID (name) or dict representing item to be deleted. :param partition_key: Specifies the partition key value for the item. - :param populate_query_metrics: Enable returning query metrics in response headers. :param pre_trigger_include: trigger id to be used as pre operation trigger. :param post_trigger_include: trigger id to be used as post operation trigger. :keyword str session_token: Token for use with Session consistency. @@ -551,6 +569,10 @@ def delete_item( if partition_key is not None: request_options["partitionKey"] = self._set_partition_key(partition_key) if populate_query_metrics is not None: + warnings.warn( + "the populate_query_metrics flag does not apply to this method and will be removed in the future", + UserWarning, + ) request_options["populateQueryMetrics"] = populate_query_metrics if pre_trigger_include is not None: request_options["preTriggerInclude"] = pre_trigger_include diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py index 75f3303d3877..b2bb02574634 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py @@ -25,6 +25,7 @@ from typing import Any, Dict, Optional, Union, cast, Iterable, List # pylint: disable=unused-import import six +import warnings from azure.core.tracing.decorator import distributed_trace # type: ignore from ._cosmos_client_connection import CosmosClientConnection @@ -228,7 +229,6 @@ def create_database( # pylint: disable=redefined-builtin Create a new database with the given ID (name). :param id: ID (name) of the database to create. - :param bool populate_query_metrics: Enable returning query metrics in response headers. :param int offer_throughput: The provisioned throughput for this offer. :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. @@ -254,6 +254,10 @@ def create_database( # pylint: disable=redefined-builtin request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if populate_query_metrics is not None: + warnings.warn( + "the populate_query_metrics flag does not apply to this method and will be removed in the future", + UserWarning, + ) request_options["populateQueryMetrics"] = populate_query_metrics if offer_throughput is not None: request_options["offerThroughput"] = offer_throughput @@ -340,7 +344,6 @@ def list_databases( """List the databases in a Cosmos DB SQL database account. :param int max_item_count: Max number of items to be returned in the enumeration operation. - :param bool populate_query_metrics: Enable returning query metrics in response headers. :keyword str session_token: Token for use with Session consistency. 
:keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. :keyword Callable response_hook: A callable invoked with the response metadata. @@ -352,6 +355,10 @@ def list_databases( if max_item_count is not None: feed_options["maxItemCount"] = max_item_count if populate_query_metrics is not None: + warnings.warn( + "the populate_query_metrics flag does not apply to this method and will be removed in the future", + UserWarning, + ) feed_options["populateQueryMetrics"] = populate_query_metrics result = self.client_connection.ReadDatabases(options=feed_options, **kwargs) @@ -377,7 +384,6 @@ def query_databases( :param bool enable_cross_partition_query: Allow scan on the queries which couldn't be served as indexing was opted out on the requested paths. :param int max_item_count: Max number of items to be returned in the enumeration operation. - :param bool populate_query_metrics: Enable returning query metrics in response headers. :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. :keyword Callable response_hook: A callable invoked with the response metadata. @@ -391,6 +397,10 @@ def query_databases( if max_item_count is not None: feed_options["maxItemCount"] = max_item_count if populate_query_metrics is not None: + warnings.warn( + "the populate_query_metrics flag does not apply to this method and will be removed in the future", + UserWarning, + ) feed_options["populateQueryMetrics"] = populate_query_metrics if query: @@ -420,7 +430,6 @@ def delete_database( :param database: The ID (name), dict representing the properties or :class:`DatabaseProxy` instance of the database to delete. :type database: str or dict(str, str) or ~azure.cosmos.DatabaseProxy - :param bool populate_query_metrics: Enable returning query metrics in response headers. :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource @@ -433,6 +442,10 @@ def delete_database( request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if populate_query_metrics is not None: + warnings.warn( + "the populate_query_metrics flag does not apply to this method and will be removed in the future", + UserWarning, + ) request_options["populateQueryMetrics"] = populate_query_metrics database_link = self._get_database_link(database) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/database.py b/sdk/cosmos/azure-cosmos/azure/cosmos/database.py index 85a40106bd72..574272f109dd 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/database.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/database.py @@ -120,7 +120,6 @@ def read(self, populate_query_metrics=None, **kwargs): # type: (Optional[bool], Any) -> Dict[str, Any] """Read the database properties. - :param bool populate_query_metrics: Enable returning query metrics in response headers. :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. :keyword Callable response_hook: A callable invoked with the response metadata. 
@@ -134,6 +133,10 @@ def read(self, populate_query_metrics=None, **kwargs): request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if populate_query_metrics is not None: + warnings.warn( + "the populate_query_metrics flag does not apply to this method and will be removed in the future", + UserWarning, + ) request_options["populateQueryMetrics"] = populate_query_metrics self._properties = self.client_connection.ReadDatabase( @@ -167,7 +170,6 @@ def create_container( :param partition_key: The partition key to use for the container. :param indexing_policy: The indexing policy to apply to the container. :param default_ttl: Default time to live (TTL) for items in the container. If unspecified, items do not expire. - :param populate_query_metrics: Enable returning query metrics in response headers. :param offer_throughput: The provisioned throughput for this offer. :param unique_key_policy: The unique key policy to apply to the container. :param conflict_resolution_policy: The conflict resolution policy to apply to the container. @@ -226,6 +228,10 @@ def create_container( request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if populate_query_metrics is not None: + warnings.warn( + "the populate_query_metrics flag does not apply to this method and will be removed in the future", + UserWarning, + ) request_options["populateQueryMetrics"] = populate_query_metrics if offer_throughput is not None: request_options["offerThroughput"] = offer_throughput @@ -315,7 +321,6 @@ def delete_container( :param container: The ID (name) of the container to delete. You can either pass in the ID of the container to delete, a :class:`ContainerProxy` instance or a dict representing the properties of the container. - :param populate_query_metrics: Enable returning query metrics in response headers. :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource @@ -328,6 +333,10 @@ def delete_container( request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if populate_query_metrics is not None: + warnings.warn( + "the populate_query_metrics flag does not apply to this method and will be removed in the future", + UserWarning, + ) request_options["populateQueryMetrics"] = populate_query_metrics collection_link = self._get_container_link(container) @@ -370,7 +379,6 @@ def list_containers(self, max_item_count=None, populate_query_metrics=None, **kw """List the containers in the database. :param max_item_count: Max number of items to be returned in the enumeration operation. - :param populate_query_metrics: Enable returning query metrics in response headers. :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. :keyword Callable response_hook: A callable invoked with the response metadata. 
@@ -392,6 +400,10 @@ def list_containers(self, max_item_count=None, populate_query_metrics=None, **kw if max_item_count is not None: feed_options["maxItemCount"] = max_item_count if populate_query_metrics is not None: + warnings.warn( + "the populate_query_metrics flag does not apply to this method and will be removed in the future", + UserWarning, + ) feed_options["populateQueryMetrics"] = populate_query_metrics result = self.client_connection.ReadContainers( @@ -416,7 +428,6 @@ def query_containers( :param query: The Azure Cosmos DB SQL query to execute. :param parameters: Optional array of parameters to the query. Ignored if no query is provided. :param max_item_count: Max number of items to be returned in the enumeration operation. - :param populate_query_metrics: Enable returning query metrics in response headers. :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. :keyword Callable response_hook: A callable invoked with the response metadata. @@ -428,6 +439,10 @@ def query_containers( if max_item_count is not None: feed_options["maxItemCount"] = max_item_count if populate_query_metrics is not None: + warnings.warn( + "the populate_query_metrics flag does not apply to this method and will be removed in the future", + UserWarning, + ) feed_options["populateQueryMetrics"] = populate_query_metrics result = self.client_connection.QueryContainers( @@ -489,6 +504,10 @@ def replace_container( request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) if populate_query_metrics is not None: + warnings.warn( + "the populate_query_metrics flag does not apply to this method and will be removed in the future", + UserWarning, + ) request_options["populateQueryMetrics"] = populate_query_metrics container_id = self._get_container_id(container) From 00b9d49178a2b1798e9cf89939d3b53e06b43c06 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Tue, 5 Apr 2022 17:37:41 -0400 Subject: [PATCH 52/56] Update CHANGELOG.md --- sdk/cosmos/azure-cosmos/CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/sdk/cosmos/azure-cosmos/CHANGELOG.md b/sdk/cosmos/azure-cosmos/CHANGELOG.md index 13530782ad21..732aaace65a4 100644 --- a/sdk/cosmos/azure-cosmos/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos/CHANGELOG.md @@ -9,6 +9,7 @@ #### Bugs Fixed #### Other Changes +- Added user warning to non-query methods trying to use `populate_query_metrics` options. ### 4.3.0b3 (2022-03-10) From 691fb653032cc07db227cc98e4ba2691e1fd1388 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Mon, 2 May 2022 16:53:00 -0400 Subject: [PATCH 53/56] Update container.py --- .../azure-cosmos/azure/cosmos/container.py | 26 ++++++++++++++----- 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py index 113d36e4000a..2b89ae8c8650 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py @@ -22,7 +22,7 @@ """Create, read, update and delete items in the Azure Cosmos DB SQL API service. 
""" -from typing import Any, Dict, List, Optional, Union, Iterable, cast # pylint: disable=unused-import +from typing import Any, Dict, List, Optional, Union, Iterable, cast, overload # pylint: disable=unused-import import warnings @@ -110,20 +110,29 @@ def _set_partition_key(self, partition_key): return CosmosClientConnection._return_undefined_or_empty_partition_key(self.is_system_key) return partition_key + @overload + def read( + self, + *, + populate_partition_key_range_statistics: Optional[bool] = None, + populate_quota_info: Optional[bool] = None, + **kwargs + ): + ... + + @distributed_trace def read( self, - populate_query_metrics=None, # type: Optional[bool] - populate_partition_key_range_statistics=None, # type: Optional[bool] - populate_quota_info=None, # type: Optional[bool] + *args, **kwargs # type: Any ): # type: (...) -> Dict[str, Any] """Read the container properties. - :param populate_partition_key_range_statistics: Enable returning partition key + :keyword bool populate_partition_key_range_statistics: Enable returning partition key range statistics in response headers. - :param populate_quota_info: Enable returning collection storage quota information in response headers. + :keyword bool populate_quota_info: Enable returning collection storage quota information in response headers. :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. :keyword Callable response_hook: A callable invoked with the response metadata. @@ -134,12 +143,15 @@ def read( """ request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) - if populate_query_metrics is not None: + populate_query_metrics = args[0] if args else None or kwargs.pop('populate_query_metrics', None) + if populate_query_metrics: warnings.warn( "the populate_query_metrics flag does not apply to this method and will be removed in the future", UserWarning, ) request_options["populateQueryMetrics"] = populate_query_metrics + populate_partition_key_range_statistics = kwargs.pop("populate_partition_key_range_statistics", None) + populate_quota_info = kwargs.pop("populate_quota_info", None) if populate_partition_key_range_statistics is not None: request_options["populatePartitionKeyRangeStatistics"] = populate_partition_key_range_statistics if populate_quota_info is not None: From 22c40fcfecd47fb4d8ff4bf944cbdc075ebf183a Mon Sep 17 00:00:00 2001 From: simorenoh Date: Wed, 4 May 2022 13:06:02 -0400 Subject: [PATCH 54/56] added tests --- .../azure-cosmos/azure/cosmos/container.py | 8 +- .../test/test_backwards_compatibility.py | 113 ++++++++++++++++++ 2 files changed, 117 insertions(+), 4 deletions(-) create mode 100644 sdk/cosmos/azure-cosmos/test/test_backwards_compatibility.py diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py index 99f656676a8b..0ab8cec7112d 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py @@ -143,15 +143,15 @@ def read( """ request_options = build_options(kwargs) response_hook = kwargs.pop('response_hook', None) - populate_query_metrics = args[0] if args else None or kwargs.pop('populate_query_metrics', None) + populate_query_metrics = args[0] if args else kwargs.pop('populate_query_metrics', None) if populate_query_metrics: warnings.warn( "the populate_query_metrics flag does not apply to this method and will be removed in the future", UserWarning, ) 
- request_options["populateQueryMetrics"] = populate_query_metrics - populate_partition_key_range_statistics = kwargs.pop("populate_partition_key_range_statistics", None) - populate_quota_info = kwargs.pop("populate_quota_info", None) + populate_partition_key_range_statistics = args[1] if args else kwargs.pop( + "populate_partition_key_range_statistics", None) + populate_quota_info = args[2] if args else kwargs.pop("populate_quota_info", None) if populate_partition_key_range_statistics is not None: request_options["populatePartitionKeyRangeStatistics"] = populate_partition_key_range_statistics if populate_quota_info is not None: diff --git a/sdk/cosmos/azure-cosmos/test/test_backwards_compatibility.py b/sdk/cosmos/azure-cosmos/test/test_backwards_compatibility.py new file mode 100644 index 000000000000..23550d2e47da --- /dev/null +++ b/sdk/cosmos/azure-cosmos/test/test_backwards_compatibility.py @@ -0,0 +1,113 @@ +# The MIT License (MIT) +# Copyright (c) 2022 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +import unittest +import pytest +from azure.cosmos import cosmos_client, PartitionKey, http_constants +import test_config +from unittest.mock import MagicMock + + +# This class tests the backwards compatibility of features being deprecated to ensure users are not broken before +# properly removing the methods marked for deprecation. 
+ +pytestmark = pytest.mark.cosmosEmulator + + +@pytest.mark.usefixtures("teardown") +class TestBackwardsCompatibility(unittest.TestCase): + + configs = test_config._test_config + host = configs.host + masterKey = configs.masterKey + + populate_true = True + + @classmethod + def setUpClass(cls): + if cls.masterKey == '[YOUR_KEY_HERE]' or cls.host == '[YOUR_ENDPOINT_HERE]': + raise Exception( + "You must specify your Azure Cosmos account values for " + "'masterKey' and 'host' at the top of this class to run the " + "tests.") + cls.client = cosmos_client.CosmosClient(cls.host, cls.masterKey, consistency_level="Session") + cls.databaseForTest = cls.client.create_database_if_not_exists(cls.configs.TEST_DATABASE_ID, + offer_throughput=500) + cls.containerForTest = cls.databaseForTest.create_container_if_not_exists( + cls.configs.TEST_COLLECTION_SINGLE_PARTITION_ID, PartitionKey(path="/id"), offer_throughput=400) + + def side_effect_populate_partition_key_range_statistics(self, *args, **kwargs): + # Extract request headers from args + self.assertTrue(args[2][http_constants.HttpHeaders.PopulatePartitionKeyRangeStatistics] is True) + raise StopIteration + + def side_effect_populate_query_metrics(self, *args, **kwargs): + # Extract request headers from args + self.assertTrue(args[2][http_constants.HttpHeaders.PopulateQueryMetrics] is True) + raise StopIteration + + def side_effect_populate_quota_info(self, *args, **kwargs): + # Extract request headers from args + self.assertTrue(args[2][http_constants.HttpHeaders.PopulateQuotaInfo] is True) + raise StopIteration + + def test_populate_query_metrics(self): + cosmos_client_connection = self.containerForTest.client_connection + cosmos_client_connection._CosmosClientConnection__Get = MagicMock( + side_effect=self.side_effect_populate_query_metrics) + try: + self.containerForTest.read(populate_query_metrics=True) + except StopIteration: + pass + try: + self.containerForTest.read(True) + except StopIteration: + pass + + def test_populate_quota_info(self): + cosmos_client_connection = self.containerForTest.client_connection + cosmos_client_connection._CosmosClientConnection__Get = MagicMock( + side_effect=self.side_effect_populate_quota_info) + try: + self.containerForTest.read(populate_quota_info=True) + except StopIteration: + pass + try: + self.containerForTest.read(False, True) + except StopIteration: + pass + + def test_populate_partition_key_range_statistics(self): + cosmos_client_connection = self.containerForTest.client_connection + cosmos_client_connection._CosmosClientConnection__Get = MagicMock( + side_effect=self.side_effect_populate_partition_key_range_statistics) + try: + self.containerForTest.read(populate_partition_key_range_statistics=True) + except StopIteration: + pass + try: + self.containerForTest.read(False, False, True) + except StopIteration: + pass + + +if __name__ == "__main__": + unittest.main() From a6c0c66a61b54233750e2630dc6c7ac78e7c58e5 Mon Sep 17 00:00:00 2001 From: simorenoh Date: Thu, 12 May 2022 16:51:38 -0400 Subject: [PATCH 55/56] avoid index errors --- sdk/cosmos/azure-cosmos/azure/cosmos/container.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py index 0ab8cec7112d..18b22c8e2505 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py @@ -149,9 +149,9 @@ def read( "the populate_query_metrics flag does not apply to this method and will 
be removed in the future",
                 UserWarning,
             )
-        populate_partition_key_range_statistics = args[1] if args else kwargs.pop(
+        populate_partition_key_range_statistics = args[1] if args and len(args) > 1 else kwargs.pop(
             "populate_partition_key_range_statistics", None)
-        populate_quota_info = args[2] if args else kwargs.pop("populate_quota_info", None)
+        populate_quota_info = args[2] if args and len(args) > 2 else kwargs.pop("populate_quota_info", None)
         if populate_partition_key_range_statistics is not None:
             request_options["populatePartitionKeyRangeStatistics"] = populate_partition_key_range_statistics
         if populate_quota_info is not None:

From 1813f9bb8fb52f217ab1c950fe91f3dc195ddab7 Mon Sep 17 00:00:00 2001
From: simorenoh
Date: Thu, 12 May 2022 16:53:31 -0400
Subject: [PATCH 56/56] Update CHANGELOG.md

---
 sdk/cosmos/azure-cosmos/CHANGELOG.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/sdk/cosmos/azure-cosmos/CHANGELOG.md b/sdk/cosmos/azure-cosmos/CHANGELOG.md
index d1ad241fc908..83d09d1b950c 100644
--- a/sdk/cosmos/azure-cosmos/CHANGELOG.md
+++ b/sdk/cosmos/azure-cosmos/CHANGELOG.md
@@ -3,6 +3,7 @@
 ### 4.3.0b5 (Unreleased)

 #### Breaking Changes
+- Method signatures have been updated to use keyword arguments instead of positional arguments for most method options in the async client.
 - Bugfix: Automatic Id generation for items was turned on for `upsert_items()` method when no 'id' value was present in document body. Method call will now require an 'id' field to be present in the document body.
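
A minimal caller-side sketch of the behavior these last commits describe. The account endpoint, key, and database/container names below are placeholders (an emulator-style endpoint is assumed), not values from this patch series: passing `populate_query_metrics` to a non-query method such as `read()` still succeeds but now emits a `UserWarning`, while the keyword-only options are forwarded as before.

import warnings

from azure.cosmos import CosmosClient, PartitionKey

# Placeholder account values for illustration only.
client = CosmosClient("https://localhost:8081", "<account-key>")
database = client.create_database_if_not_exists("sample_db")
container = database.create_container_if_not_exists("sample_container", PartitionKey(path="/id"))

# Deprecated: populate_query_metrics on a non-query method now emits a UserWarning.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    container.read(populate_query_metrics=True)
print(any(issubclass(w.category, UserWarning) for w in caught))  # True

# Preferred: keyword-only options, no warning.
properties = container.read(populate_quota_info=True)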