diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/base.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py similarity index 98% rename from sdk/cosmos/azure-cosmos/azure/cosmos/base.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_base.py index 5fa2839f0b0d..6e4bf3e10888 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/base.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py @@ -33,8 +33,8 @@ from . import documents from . import partition_key from . import http_constants -from . import constants -from . import runtime_constants +from . import _constants as constants +from . import _runtime_constants import six from six.moves.urllib.parse import quote as urllib_quote @@ -178,10 +178,10 @@ def GetHeaders(cosmos_client_connection, if verb == 'post' or verb == 'put': if not headers.get(http_constants.HttpHeaders.ContentType): - headers[http_constants.HttpHeaders.ContentType] = runtime_constants.MediaTypes.Json + headers[http_constants.HttpHeaders.ContentType] = _runtime_constants.MediaTypes.Json if not headers.get(http_constants.HttpHeaders.Accept): - headers[http_constants.HttpHeaders.Accept] = runtime_constants.MediaTypes.Json + headers[http_constants.HttpHeaders.Accept] = _runtime_constants.MediaTypes.Json if partition_key_range_id is not None: headers[http_constants.HttpHeaders.PartitionKeyRangeID] = partition_key_range_id diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/consistent_hash_ring.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_consistent_hash_ring.py similarity index 96% rename from sdk/cosmos/azure-cosmos/azure/cosmos/consistent_hash_ring.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_consistent_hash_ring.py index 6b6bdd086b62..93198b3f1d72 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/consistent_hash_ring.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_consistent_hash_ring.py @@ -26,10 +26,10 @@ import six from six.moves import xrange -from . import partition +from . 
import _partition -class _ConsistentHashRing(object): +class ConsistentHashRing(object): """The ConsistentHashRing class implements a consistent hash ring using the hash generator specified. """ @@ -79,13 +79,13 @@ def _ConstructPartitions(self, collection_links, partitions_per_node): using the hashing algorithm and then finally sorting the partitions based on the hash value. """ collections_node_count = len(collection_links) - partitions = [partition._Partition() for _ in xrange(0, partitions_per_node * collections_node_count)] + partitions = [_partition.Partition() for _ in xrange(0, partitions_per_node * collections_node_count)] index = 0 for collection_node in collection_links: hash_value = self.hash_generator.ComputeHash(self._GetBytes(collection_node)) for _ in xrange(0, partitions_per_node): - partitions[index] = partition._Partition(hash_value, collection_node) + partitions[index] = _partition.Partition(hash_value, collection_node) index += 1 hash_value = self.hash_generator.ComputeHash(hash_value) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/constants.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_constants.py similarity index 100% rename from sdk/cosmos/azure-cosmos/azure/cosmos/constants.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_constants.py diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client_connection.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py similarity index 98% rename from sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client_connection.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py index f7b2fe1cca57..6f9fe50a1c86 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client_connection.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py @@ -25,18 +25,18 @@ import six from typing import cast -from . import base +from . import _base as base from . import documents -from . import constants +from . import _constants as constants from . 
import http_constants -from . import query_iterable -from . import runtime_constants -from . import request_object -from . import synchronized_request -from . import global_endpoint_manager -from .routing import routing_map_provider as routing_map_provider -from . import session -from . import utils +from . import _query_iterable as query_iterable +from . import _runtime_constants as runtime_constants +from . import _request_object +from . import _synchronized_request as synchronized_request +from . import _global_endpoint_manager as global_endpoint_manager +from ._routing import routing_map_provider as routing_map_provider +from . import _session +from . import _utils from .partition_key import _Undefined, _Empty @@ -110,7 +110,7 @@ def __init__(self, http_constants.HttpHeaders.Version: http_constants.Versions.CurrentVersion, http_constants.HttpHeaders.UserAgent: - utils._get_user_agent(), + _utils.get_user_agent(), # For single partition query with aggregate functions we would try to accumulate the results on the SDK. # We need to set continuation as not expected. 
http_constants.HttpHeaders.IsContinuationExpected: False @@ -127,7 +127,7 @@ def __init__(self, '''create a session - this is maintained only if the default consistency level on the client is set to session, or if the user explicitly sets it as a property via setter''' - self.session = session.Session(self.url_connection) + self.session = _session.Session(self.url_connection) else: self.session = None @@ -150,7 +150,7 @@ def __init__(self, self._query_compatibility_mode = CosmosClientConnection._QueryCompatibilityMode.Default # Routing map provider - self._routing_map_provider = routing_map_provider._SmartRoutingMapProvider(self) + self._routing_map_provider = routing_map_provider.SmartRoutingMapProvider(self) database_account = self._global_endpoint_manager._GetDatabaseAccount() self._global_endpoint_manager.force_refresh(database_account) @@ -1932,7 +1932,7 @@ def ReadMedia(self, media_link): {}) # ReadMedia will always use WriteEndpoint since it's not replicated in readable Geo regions - request = request_object._RequestObject('media', documents._OperationType.Read) + request = _request_object.RequestObject('media', documents._OperationType.Read) result, self.last_response_headers = self.__Get(path, request, headers) @@ -1981,7 +1981,7 @@ def UpdateMedia(self, media_link, readable_stream, options=None): options) # UpdateMedia will use WriteEndpoint since it uses PUT operation - request = request_object._RequestObject('media', documents._OperationType.Update) + request = _request_object.RequestObject('media', documents._OperationType.Update) result, self.last_response_headers = self.__Put(path, request, readable_stream, @@ -2200,7 +2200,7 @@ def ExecuteStoredProcedure(self, sproc_link, params, options=None): options) # ExecuteStoredProcedure will use WriteEndpoint since it uses POST operation - request = request_object._RequestObject('sprocs', documents._OperationType.ExecuteJavaScript) + request = _request_object.RequestObject('sprocs', 
documents._OperationType.ExecuteJavaScript) result, self.last_response_headers = self.__Post(path, request, params, @@ -2389,7 +2389,7 @@ def GetDatabaseAccount(self, url_connection=None): '', # type {}) - request = request_object._RequestObject('databaseaccount', documents._OperationType.Read, url_connection) + request = _request_object.RequestObject('databaseaccount', documents._OperationType.Read, url_connection) result, self.last_response_headers = self.__Get('', request, headers) @@ -2449,7 +2449,7 @@ def Create(self, body, path, type, id, initial_headers, options=None): options) # Create will use WriteEndpoint since it uses POST operation - request = request_object._RequestObject(type, documents._OperationType.Create) + request = _request_object.RequestObject(type, documents._OperationType.Create) result, self.last_response_headers = self.__Post(path, request, body, @@ -2491,7 +2491,7 @@ def Upsert(self, body, path, type, id, initial_headers, options=None): headers[http_constants.HttpHeaders.IsUpsert] = True # Upsert will use WriteEndpoint since it uses POST operation - request = request_object._RequestObject(type, documents._OperationType.Upsert) + request = _request_object.RequestObject(type, documents._OperationType.Upsert) result, self.last_response_headers = self.__Post(path, request, body, @@ -2529,7 +2529,7 @@ def Replace(self, resource, path, type, id, initial_headers, options=None): type, options) # Replace will use WriteEndpoint since it uses PUT operation - request = request_object._RequestObject(type, documents._OperationType.Replace) + request = _request_object.RequestObject(type, documents._OperationType.Replace) result, self.last_response_headers = self.__Put(path, request, resource, @@ -2567,7 +2567,7 @@ def Read(self, path, type, id, initial_headers, options=None): type, options) # Read will use ReadEndpoint since it uses GET operation - request = request_object._RequestObject(type, documents._OperationType.Read) + request = 
_request_object.RequestObject(type, documents._OperationType.Read) result, self.last_response_headers = self.__Get(path, request, headers) @@ -2601,7 +2601,7 @@ def DeleteResource(self, path, type, id, initial_headers, options=None): type, options) # Delete will use WriteEndpoint since it uses DELETE operation - request = request_object._RequestObject(type, documents._OperationType.Delete) + request = _request_object.RequestObject(type, documents._OperationType.Delete) result, self.last_response_headers = self.__Delete(path, request, headers) @@ -2783,7 +2783,7 @@ def __GetBodiesFromQueryResult(result): # Copy to make sure that default_headers won't be changed. if query is None: # Query operations will use ReadEndpoint even though it uses GET(for feed requests) - request = request_object._RequestObject(type, documents._OperationType.ReadFeed) + request = _request_object.RequestObject(type, documents._OperationType.ReadFeed) headers = base.GetHeaders(self, initial_headers, 'get', @@ -2811,7 +2811,7 @@ def __GetBodiesFromQueryResult(result): raise SystemError('Unexpected query compatibility mode.') # Query operations will use ReadEndpoint even though it uses POST(for regular query operations) - request = request_object._RequestObject(type, documents._OperationType.SqlQuery) + request = _request_object.RequestObject(type, documents._OperationType.SqlQuery) headers = base.GetHeaders(self, initial_headers, 'post', diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/default_retry_policy.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_default_retry_policy.py similarity index 96% rename from sdk/cosmos/azure-cosmos/azure/cosmos/default_retry_policy.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_default_retry_policy.py index 1fc309c98302..8cb094c1552a 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/default_retry_policy.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_default_retry_policy.py @@ -23,7 +23,7 @@ """ from . 
import http_constants -class _DefaultRetryPolicy(object): +class DefaultRetryPolicy(object): error_codes = http_constants._ErrorCodes CONNECTION_ERROR_CODES = [ @@ -52,7 +52,7 @@ def __init__(self, *args): self.args = args def needsRetry(self, error_code): - if error_code in _DefaultRetryPolicy.CONNECTION_ERROR_CODES: + if error_code in DefaultRetryPolicy.CONNECTION_ERROR_CODES: if (len(self.args) > 0): if (self.args[4]['method'] == 'GET') or (http_constants.HttpHeaders.IsQuery in self.args[4]['headers']): return True diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/endpoint_discovery_retry_policy.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_endpoint_discovery_retry_policy.py similarity index 94% rename from sdk/cosmos/azure-cosmos/azure/cosmos/endpoint_discovery_retry_policy.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_endpoint_discovery_retry_policy.py index dbc1e41f845f..d3158cd20035 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/endpoint_discovery_retry_policy.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_endpoint_discovery_retry_policy.py @@ -33,7 +33,7 @@ logger.addHandler(log_handler) -class _EndpointDiscoveryRetryPolicy(object): +class EndpointDiscoveryRetryPolicy(object): """The endpoint discovery retry policy class used for geo-replicated database accounts to handle the write forbidden exceptions due to writable/readable location changes (say, after a failover). 
@@ -44,9 +44,9 @@ class _EndpointDiscoveryRetryPolicy(object): def __init__(self, connection_policy, global_endpoint_manager, *args): self.global_endpoint_manager = global_endpoint_manager - self._max_retry_attempt_count = _EndpointDiscoveryRetryPolicy.Max_retry_attempt_count + self._max_retry_attempt_count = EndpointDiscoveryRetryPolicy.Max_retry_attempt_count self.failover_retry_count = 0 - self.retry_after_in_milliseconds = _EndpointDiscoveryRetryPolicy.Retry_after_in_milliseconds + self.retry_after_in_milliseconds = EndpointDiscoveryRetryPolicy.Retry_after_in_milliseconds self.connection_policy = connection_policy self.request = args[0] if args else None #clear previous location-based routing directive diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/execution_context/__init__.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/__init__.py similarity index 100% rename from sdk/cosmos/azure-cosmos/azure/cosmos/execution_context/__init__.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/__init__.py diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/execution_context/aggregators.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aggregators.py similarity index 97% rename from sdk/cosmos/azure-cosmos/azure/cosmos/execution_context/aggregators.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aggregators.py index 402acb514fbc..2a3ed76bce64 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/execution_context/aggregators.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/aggregators.py @@ -22,7 +22,7 @@ """Internal class for aggregation queries implementation in the Azure Cosmos database service. 
""" from abc import abstractmethod, ABCMeta -from azure.cosmos.execution_context.document_producer import _OrderByHelper +from azure.cosmos._execution_context.document_producer import _OrderByHelper class _Aggregator(object): diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/execution_context/base_execution_context.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/base_execution_context.py similarity index 94% rename from sdk/cosmos/azure-cosmos/azure/cosmos/execution_context/base_execution_context.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/base_execution_context.py index 42bd23269da2..e32fb2e95b28 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/execution_context/base_execution_context.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/base_execution_context.py @@ -23,9 +23,9 @@ """ from collections import deque -from .. import retry_utility +from .. import _retry_utility from .. import http_constants -from .. import base +from .. import _base class _QueryExecutionContextBase(object): """ @@ -140,7 +140,7 @@ def _fetch_items_helper_with_retries(self, fetch_function): def callback(): return self._fetch_items_helper_no_retries(fetch_function) - return retry_utility._Execute(self._client, self._client._global_endpoint_manager, callback) + return _retry_utility.Execute(self._client, self._client._global_endpoint_manager, callback) class _DefaultQueryExecutionContext(_QueryExecutionContextBase): @@ -208,8 +208,8 @@ def __init__(self, client, options, database_link, query, partition_key): raise ValueError("_collection_links_length is not greater than 0.") # Creating the QueryFeed for the first collection - path = base.GetPathFromLink(self._collection_links[self._current_collection_index], 'docs') - collection_id = base.GetResourceIdOrFullNameFromLink(self._collection_links[self._current_collection_index]) + path = _base.GetPathFromLink(self._collection_links[self._current_collection_index], 'docs') + collection_id = 
_base.GetResourceIdOrFullNameFromLink(self._collection_links[self._current_collection_index]) self._current_collection_index += 1 @@ -241,8 +241,8 @@ def _fetch_next_block(self): # creating separate feed queries for each collection and fetching the items while not fetched_items: if self._collection_links and self._current_collection_index < self._collection_links_length: - path = base.GetPathFromLink(self._collection_links[self._current_collection_index], 'docs') - collection_id = base.GetResourceIdOrFullNameFromLink(self._collection_links[self._current_collection_index]) + path = _base.GetPathFromLink(self._collection_links[self._current_collection_index], 'docs') + collection_id = _base.GetResourceIdOrFullNameFromLink(self._collection_links[self._current_collection_index]) self._continuation = None self._has_started = False diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/execution_context/document_producer.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/document_producer.py similarity index 97% rename from sdk/cosmos/azure-cosmos/azure/cosmos/execution_context/document_producer.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/document_producer.py index c74a60e3bade..f6f7db7e911e 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/execution_context/document_producer.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/document_producer.py @@ -25,8 +25,8 @@ import six import numbers from collections import deque -from azure.cosmos import base -from azure.cosmos.execution_context.base_execution_context import _DefaultQueryExecutionContext +from azure.cosmos import _base +from azure.cosmos._execution_context.base_execution_context import _DefaultQueryExecutionContext from six.moves import xrange class _DocumentProducer(object): @@ -51,8 +51,8 @@ def __init__(self, partition_key_target_range, client, collection_link, query, d self._cur_item = None # initiate execution context - path = base.GetPathFromLink(collection_link, 
'docs') - collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + path = _base.GetPathFromLink(collection_link, 'docs') + collection_id = _base.GetResourceIdOrFullNameFromLink(collection_link) def fetch_fn(options): return self._client.QueryFeed(path, collection_id, diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/execution_context/endpoint_component.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py similarity index 97% rename from sdk/cosmos/azure-cosmos/azure/cosmos/execution_context/endpoint_component.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py index 868a55464639..77dda8512b00 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/execution_context/endpoint_component.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/endpoint_component.py @@ -23,7 +23,7 @@ """ import numbers -from azure.cosmos.execution_context.aggregators import _AverageAggregator, _CountAggregator, _MaxAggregator, \ +from azure.cosmos._execution_context.aggregators import _AverageAggregator, _CountAggregator, _MaxAggregator, \ _MinAggregator, _SumAggregator diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/execution_context/execution_dispatcher.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py similarity index 94% rename from sdk/cosmos/azure-cosmos/azure/cosmos/execution_context/execution_dispatcher.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py index 6f7740af4f43..acc7452da9ab 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/execution_context/execution_dispatcher.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py @@ -25,11 +25,11 @@ import json from six.moves import xrange from azure.cosmos.errors import HTTPFailure -from azure.cosmos.execution_context.base_execution_context import _QueryExecutionContextBase -from azure.cosmos.execution_context.base_execution_context 
import _DefaultQueryExecutionContext -from azure.cosmos.execution_context.query_execution_info import _PartitionedQueryExecutionInfo -from azure.cosmos.execution_context import endpoint_component -from azure.cosmos.execution_context import multi_execution_aggregator +from azure.cosmos._execution_context.base_execution_context import _QueryExecutionContextBase +from azure.cosmos._execution_context.base_execution_context import _DefaultQueryExecutionContext +from azure.cosmos._execution_context.query_execution_info import _PartitionedQueryExecutionInfo +from azure.cosmos._execution_context import endpoint_component +from azure.cosmos._execution_context import multi_execution_aggregator from azure.cosmos.http_constants import StatusCodes, SubStatusCodes class _ProxyQueryExecutionContext(_QueryExecutionContextBase): diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/execution_context/multi_execution_aggregator.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/multi_execution_aggregator.py similarity index 95% rename from sdk/cosmos/azure-cosmos/azure/cosmos/execution_context/multi_execution_aggregator.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/multi_execution_aggregator.py index 63a2dfb3b706..f0bfb8887fd1 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/execution_context/multi_execution_aggregator.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/multi_execution_aggregator.py @@ -23,9 +23,9 @@ """ import heapq -from azure.cosmos.execution_context.base_execution_context import _QueryExecutionContextBase -from azure.cosmos.execution_context import document_producer -from azure.cosmos.routing import routing_range +from azure.cosmos._execution_context.base_execution_context import _QueryExecutionContextBase +from azure.cosmos._execution_context import document_producer +from azure.cosmos._routing import routing_range class _MultiExecutionContextAggregator(_QueryExecutionContextBase): """This class is capable of queries 
which requires rewriting based on @@ -147,4 +147,4 @@ def _createTargetPartitionQueryExecutionContext(self, partition_key_target_range def _get_target_parition_key_range(self): query_ranges = self._partitioned_query_ex_info.get_query_ranges() - return self._routing_provider.get_overlapping_ranges(self._resource_link, [routing_range._Range.ParseFromDict(range_as_dict) for range_as_dict in query_ranges]) + return self._routing_provider.get_overlapping_ranges(self._resource_link, [routing_range.Range.ParseFromDict(range_as_dict) for range_as_dict in query_ranges]) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/execution_context/query_execution_info.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/query_execution_info.py similarity index 100% rename from sdk/cosmos/azure-cosmos/azure/cosmos/execution_context/query_execution_info.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/query_execution_info.py diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/global_endpoint_manager.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_global_endpoint_manager.py similarity index 98% rename from sdk/cosmos/azure-cosmos/azure/cosmos/global_endpoint_manager.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_global_endpoint_manager.py index 0d7ff3936b76..0c44a81a138a 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/global_endpoint_manager.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_global_endpoint_manager.py @@ -24,9 +24,9 @@ from six.moves.urllib.parse import urlparse import threading -from . import constants +from . import _constants as constants from . 
import errors -from .location_cache import LocationCache +from ._location_cache import LocationCache class _GlobalEndpointManager(object): """ diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/hash_partition_resolver.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_hash_partition_resolver.py similarity index 95% rename from sdk/cosmos/azure-cosmos/azure/cosmos/hash_partition_resolver.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_hash_partition_resolver.py index 8b15926ec680..fca0cfe85d6f 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/hash_partition_resolver.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_hash_partition_resolver.py @@ -22,8 +22,8 @@ """Hash partition resolver implementation in the Azure Cosmos database service. """ -from . import murmur_hash -from . import consistent_hash_ring +from . import _murmur_hash +from . import _consistent_hash_ring class HashPartitionResolver(object): """HashPartitionResolver implements partitioning based on the value of a hash function, allowing you to evenly @@ -51,9 +51,9 @@ def __init__(self, partition_key_extractor, collection_links, default_number_of_ self.collection_links = collection_links if hash_generator is None: - hash_generator = murmur_hash._MurmurHash() + hash_generator = _murmur_hash.MurmurHash() - self.consistent_hash_ring = consistent_hash_ring._ConsistentHashRing(self.collection_links, default_number_of_virtual_nodes_per_collection, hash_generator) + self.consistent_hash_ring = _consistent_hash_ring.ConsistentHashRing(self.collection_links, default_number_of_virtual_nodes_per_collection, hash_generator) def ResolveForCreate(self, document): """Resolves the collection for creating the document based on the partition key. 
diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/location_cache.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_location_cache.py similarity index 99% rename from sdk/cosmos/azure-cosmos/azure/cosmos/location_cache.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_location_cache.py index 1534cef4f9ea..68f7cdb23a24 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/location_cache.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_location_cache.py @@ -25,7 +25,6 @@ import collections import time -from . import base from . import documents from . import http_constants diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/murmur_hash.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_murmur_hash.py similarity index 99% rename from sdk/cosmos/azure-cosmos/azure/cosmos/murmur_hash.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_murmur_hash.py index d94b5f39044b..0785b75096e6 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/murmur_hash.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_murmur_hash.py @@ -43,7 +43,7 @@ https://pypi.python.org/pypi/mmh3/2.0 ''' -class _MurmurHash(object): +class MurmurHash(object): """ The 32 bit x86 version of MurmurHash3 implementation. """ def ComputeHash(self, key): diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/partition.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_partition.py similarity index 99% rename from sdk/cosmos/azure-cosmos/azure/cosmos/partition.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_partition.py index ffd931965bca..35f1611a21bc 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/partition.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_partition.py @@ -24,7 +24,7 @@ from six.moves import xrange -class _Partition(object): +class Partition(object): """Represents a class that holds the hash value and node name for each partition. 
""" def __init__(self, hash_value = None, node = None): diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/query_iterable.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_query_iterable.py similarity index 97% rename from sdk/cosmos/azure-cosmos/azure/cosmos/query_iterable.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_query_iterable.py index ad2e8cd98d96..8abd73b4f8c1 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/query_iterable.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_query_iterable.py @@ -21,8 +21,8 @@ """Iterable query results in the Azure Cosmos database service. """ -from azure.cosmos.execution_context import execution_dispatcher -from azure.cosmos.execution_context import base_execution_context +from azure.cosmos._execution_context import execution_dispatcher +from azure.cosmos._execution_context import base_execution_context class QueryIterable(object): """Represents an iterable object of the query results. diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/range.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_range.py similarity index 100% rename from sdk/cosmos/azure-cosmos/azure/cosmos/range.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_range.py diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/range_partition_resolver.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_range_partition_resolver.py similarity index 100% rename from sdk/cosmos/azure-cosmos/azure/cosmos/range_partition_resolver.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_range_partition_resolver.py diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/request_object.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_request_object.py similarity index 98% rename from sdk/cosmos/azure-cosmos/azure/cosmos/request_object.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_request_object.py index db9ef786765f..fa882a0a0c54 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/request_object.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_request_object.py @@ -21,7 +21,7 @@ """Represents a request object. 
""" -class _RequestObject(object): +class RequestObject(object): def __init__(self, resource_type, operation_type, endpoint_override = None): self.resource_type = resource_type self.operation_type = operation_type diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/resource_throttle_retry_policy.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_resource_throttle_retry_policy.py similarity index 98% rename from sdk/cosmos/azure-cosmos/azure/cosmos/resource_throttle_retry_policy.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_resource_throttle_retry_policy.py index f3179a1902ef..57d32fd2b8e7 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/resource_throttle_retry_policy.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_resource_throttle_retry_policy.py @@ -24,7 +24,7 @@ from . import http_constants -class _ResourceThrottleRetryPolicy(object): +class ResourceThrottleRetryPolicy(object): def __init__(self, max_retry_attempt_count, fixed_retry_interval_in_milliseconds, max_wait_time_in_seconds): self._max_retry_attempt_count = max_retry_attempt_count diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/retry_options.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_retry_options.py similarity index 100% rename from sdk/cosmos/azure-cosmos/azure/cosmos/retry_options.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_retry_options.py diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/retry_utility.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_retry_utility.py similarity index 83% rename from sdk/cosmos/azure-cosmos/azure/cosmos/retry_utility.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_retry_utility.py index 1daa9a1f8c26..95f611fe4f2d 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/retry_utility.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_retry_utility.py @@ -25,13 +25,13 @@ import time from . import errors -from . import endpoint_discovery_retry_policy -from . import resource_throttle_retry_policy -from . import default_retry_policy -from . import session_retry_policy +from . 
import _endpoint_discovery_retry_policy +from . import _resource_throttle_retry_policy +from . import _default_retry_policy +from . import _session_retry_policy from .http_constants import HttpHeaders, StatusCodes, SubStatusCodes -def _Execute(client, global_endpoint_manager, function, *args, **kwargs): +def Execute(client, global_endpoint_manager, function, *args, **kwargs): """Exectutes the function with passed parameters applying all retry policies :param object client: @@ -45,20 +45,20 @@ def _Execute(client, global_endpoint_manager, function, *args, **kwargs): """ # instantiate all retry policies here to be applied for each request execution - endpointDiscovery_retry_policy = endpoint_discovery_retry_policy._EndpointDiscoveryRetryPolicy(client.connection_policy, global_endpoint_manager, *args) + endpointDiscovery_retry_policy = _endpoint_discovery_retry_policy.EndpointDiscoveryRetryPolicy(client.connection_policy, global_endpoint_manager, *args) - resourceThrottle_retry_policy = resource_throttle_retry_policy._ResourceThrottleRetryPolicy(client.connection_policy.RetryOptions.MaxRetryAttemptCount, + resourceThrottle_retry_policy = _resource_throttle_retry_policy.ResourceThrottleRetryPolicy(client.connection_policy.RetryOptions.MaxRetryAttemptCount, client.connection_policy.RetryOptions.FixedRetryIntervalInMilliseconds, client.connection_policy.RetryOptions.MaxWaitTimeInSeconds) - defaultRetry_policy = default_retry_policy._DefaultRetryPolicy(*args) + defaultRetry_policy = _default_retry_policy.DefaultRetryPolicy(*args) - sessionRetry_policy = session_retry_policy._SessionRetryPolicy(client.connection_policy.EnableEndpointDiscovery, global_endpoint_manager, *args) + sessionRetry_policy = _session_retry_policy._SessionRetryPolicy(client.connection_policy.EnableEndpointDiscovery, global_endpoint_manager, *args) while True: try: if args: - result = _ExecuteFunction(function, global_endpoint_manager, *args, **kwargs) + result = ExecuteFunction(function, 
global_endpoint_manager, *args, **kwargs) else: - result = _ExecuteFunction(function, *args, **kwargs) + result = ExecuteFunction(function, *args, **kwargs) if not client.last_response_headers: client.last_response_headers = {} @@ -94,7 +94,7 @@ def _Execute(client, global_endpoint_manager, function, *args, **kwargs): # Wait for retry_after_in_milliseconds time before the next retry time.sleep(retry_policy.retry_after_in_milliseconds / 1000.0) -def _ExecuteFunction(function, *args, **kwargs): +def ExecuteFunction(function, *args, **kwargs): """ Stub method so that it can be used for mocking purposes as well. """ return function(*args, **kwargs) \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/routing/__init__.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/__init__.py similarity index 100% rename from sdk/cosmos/azure-cosmos/azure/cosmos/routing/__init__.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_routing/__init__.py diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/routing/collection_routing_map.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/collection_routing_map.py similarity index 79% rename from sdk/cosmos/azure-cosmos/azure/cosmos/routing/collection_routing_map.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_routing/collection_routing_map.py index bb030c3fc223..d1f86eb05b89 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/routing/collection_routing_map.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/collection_routing_map.py @@ -23,11 +23,11 @@ """ import bisect -from azure.cosmos.routing import routing_range -from azure.cosmos.routing.routing_range import _PartitionKeyRange +from azure.cosmos._routing import routing_range +from azure.cosmos._routing.routing_range import PartitionKeyRange from six.moves import xrange -class _CollectionRoutingMap(object): +class CollectionRoutingMap(object): """Stores partition key ranges in an efficient way with some additional information and provides convenience methods 
for working with set of ranges. """ @@ -40,7 +40,7 @@ def __init__(self, range_by_id, range_by_info, ordered_partition_key_ranges, ord self._rangeByInfo = range_by_info self._orderedPartitionKeyRanges = ordered_partition_key_ranges - self._orderedRanges = [routing_range._Range(pkr[_PartitionKeyRange.MinInclusive], pkr[_PartitionKeyRange.MaxExclusive], True, False) for pkr in ordered_partition_key_ranges] + self._orderedRanges = [routing_range.Range(pkr[PartitionKeyRange.MinInclusive], pkr[PartitionKeyRange.MaxExclusive], True, False) for pkr in ordered_partition_key_ranges] self._orderedPartitionInfo = ordered_partition_info self._collectionUniqueId = collection_unique_id @@ -51,15 +51,15 @@ def CompleteRoutingMap(cls, partition_key_range_info_tupple_list, collection_uni sortedRanges = [] for r in partition_key_range_info_tupple_list: - rangeById[r[0][_PartitionKeyRange.Id]] = r + rangeById[r[0][PartitionKeyRange.Id]] = r rangeByInfo[r[1]] = r[0] sortedRanges.append(r) - sortedRanges.sort(key = lambda r: r[0][_PartitionKeyRange.MinInclusive]) + sortedRanges.sort(key = lambda r: r[0][PartitionKeyRange.MinInclusive]) partitionKeyOrderedRange = [r[0] for r in sortedRanges] orderedPartitionInfo = [r[1] for r in sortedRanges] - if not _CollectionRoutingMap.is_complete_set_of_range(partitionKeyOrderedRange): return None + if not CollectionRoutingMap.is_complete_set_of_range(partitionKeyOrderedRange): return None return cls(rangeById, rangeByInfo, partitionKeyOrderedRange, orderedPartitionInfo, collection_unique_id) def get_ordered_partition_key_ranges(self): @@ -80,10 +80,10 @@ def get_range_by_effective_partition_key(self, effective_partition_key_value): The partition key range. 
:rtype: dict """ - if _CollectionRoutingMap.MinimumInclusiveEffectivePartitionKey == effective_partition_key_value: + if CollectionRoutingMap.MinimumInclusiveEffectivePartitionKey == effective_partition_key_value: return self._orderedPartitionKeyRanges[0] - if _CollectionRoutingMap.MaximumExclusiveEffectivePartitionKey == effective_partition_key_value: + if CollectionRoutingMap.MaximumExclusiveEffectivePartitionKey == effective_partition_key_value: return None sortedLow = [(r.min, not r.isMinInclusive) for r in self._orderedRanges] @@ -118,7 +118,7 @@ def get_overlapping_ranges(self, provided_partition_key_ranges): :rtype: list """ - if isinstance(provided_partition_key_ranges, routing_range._Range): + if isinstance(provided_partition_key_ranges, routing_range.Range): return self.get_overlapping_ranges([provided_partition_key_ranges]) minToPartitionRange = {} @@ -135,14 +135,14 @@ def get_overlapping_ranges(self, provided_partition_key_ranges): maxIndex = maxIndex - 1 for i in xrange(minIndex, maxIndex + 1): - if routing_range._Range.overlaps(self._orderedRanges[i], providedRange): - minToPartitionRange[self._orderedPartitionKeyRanges[i][_PartitionKeyRange.MinInclusive]] = self._orderedPartitionKeyRanges[i] + if routing_range.Range.overlaps(self._orderedRanges[i], providedRange): + minToPartitionRange[self._orderedPartitionKeyRanges[i][PartitionKeyRange.MinInclusive]] = self._orderedPartitionKeyRanges[i] overlapping_partition_key_ranges = list(minToPartitionRange.values()) def getKey(r): - return r[_PartitionKeyRange.MinInclusive] + return r[PartitionKeyRange.MinInclusive] overlapping_partition_key_ranges.sort(key = getKey) return overlapping_partition_key_ranges @@ -153,16 +153,16 @@ def is_complete_set_of_range(ordered_partition_key_range_list): firstRange = ordered_partition_key_range_list[0] lastRange = ordered_partition_key_range_list[-1] - isComplete = (firstRange[_PartitionKeyRange.MinInclusive] == _CollectionRoutingMap.MinimumInclusiveEffectivePartitionKey) 
- isComplete &= (lastRange[_PartitionKeyRange.MaxExclusive] == _CollectionRoutingMap.MaximumExclusiveEffectivePartitionKey) + isComplete = (firstRange[PartitionKeyRange.MinInclusive] == CollectionRoutingMap.MinimumInclusiveEffectivePartitionKey) + isComplete &= (lastRange[PartitionKeyRange.MaxExclusive] == CollectionRoutingMap.MaximumExclusiveEffectivePartitionKey) for i in range(1, len(ordered_partition_key_range_list)): previousRange = ordered_partition_key_range_list[i - 1] currentRange = ordered_partition_key_range_list[i] - isComplete &= previousRange[_PartitionKeyRange.MaxExclusive] == currentRange[_PartitionKeyRange.MinInclusive] + isComplete &= previousRange[PartitionKeyRange.MaxExclusive] == currentRange[PartitionKeyRange.MinInclusive] if not isComplete: - if previousRange[_PartitionKeyRange.MaxExclusive] > currentRange[_PartitionKeyRange.MinInclusive]: + if previousRange[PartitionKeyRange.MaxExclusive] > currentRange[PartitionKeyRange.MinInclusive]: raise ValueError("Ranges overlap") break diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/routing/routing_map_provider.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/routing_map_provider.py similarity index 80% rename from sdk/cosmos/azure-cosmos/azure/cosmos/routing/routing_map_provider.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_routing/routing_map_provider.py index 73c9ed56529d..7b371e97e2d3 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/routing/routing_map_provider.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/routing_map_provider.py @@ -22,14 +22,14 @@ """Internal class for partition key range cache implementation in the Azure Cosmos database service. """ -from .. import base -from .collection_routing_map import _CollectionRoutingMap +from .. import _base +from .collection_routing_map import CollectionRoutingMap from . 
import routing_range -from .routing_range import _PartitionKeyRange +from .routing_range import PartitionKeyRange -class _PartitionKeyRangeCache(object): +class PartitionKeyRangeCache(object): ''' - _PartitionKeyRangeCache provides list of effective partition key ranges for a collection. + PartitionKeyRangeCache provides list of effective partition key ranges for a collection. This implementation loads and caches the collection routing map per collection on demand. ''' @@ -59,7 +59,7 @@ def get_overlapping_ranges(self, collection_link, partition_key_ranges): ''' cl = self._documentClient - collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + collection_id = _base.GetResourceIdOrFullNameFromLink(collection_link) collection_routing_map = self._collection_routing_map_by_item.get(collection_id) if collection_routing_map is None: @@ -67,8 +67,8 @@ def get_overlapping_ranges(self, collection_link, partition_key_ranges): # for large collections, a split may complete between the read partition key ranges query page responses, # causing the partitionKeyRanges to have both the children ranges and their parents. Therefore, we need # to discard the parent ranges to have a valid routing map. 
- collection_pk_ranges = _PartitionKeyRangeCache._discard_parent_ranges(collection_pk_ranges) - collection_routing_map = _CollectionRoutingMap.CompleteRoutingMap([(r, True) for r in collection_pk_ranges], collection_id) + collection_pk_ranges = PartitionKeyRangeCache._discard_parent_ranges(collection_pk_ranges) + collection_routing_map = CollectionRoutingMap.CompleteRoutingMap([(r, True) for r in collection_pk_ranges], collection_id) self._collection_routing_map_by_item[collection_id] = collection_routing_map return collection_routing_map.get_overlapping_ranges(partition_key_ranges) @@ -76,17 +76,17 @@ def get_overlapping_ranges(self, collection_link, partition_key_ranges): def _discard_parent_ranges(partitionKeyRanges): parentIds = set() for r in partitionKeyRanges: - if isinstance(r, dict) and _PartitionKeyRange.Parents in r: - for parentId in r[_PartitionKeyRange.Parents]: + if isinstance(r, dict) and PartitionKeyRange.Parents in r: + for parentId in r[PartitionKeyRange.Parents]: parentIds.add(parentId) - return (r for r in partitionKeyRanges if r[_PartitionKeyRange.Id] not in parentIds) + return (r for r in partitionKeyRanges if r[PartitionKeyRange.Id] not in parentIds) -class _SmartRoutingMapProvider(_PartitionKeyRangeCache): +class SmartRoutingMapProvider(PartitionKeyRangeCache): """ - Efficiently uses PartitionKeyRangeCach and minimizes the unnecessary invocation of _CollectionRoutingMap.get_overlapping_ranges() + Efficiently uses PartitionKeyRangeCache and minimizes the unnecessary invocation of CollectionRoutingMap.get_overlapping_ranges() """ def __init__(self, client): - super(_SmartRoutingMapProvider, self).__init__(client) + super(SmartRoutingMapProvider, self).__init__(client) def _second_range_is_after_first_range(self, range1, range2): @@ -112,20 +112,20 @@ def _subtract_range(self, r, partition_key_range): Evaluates and returns r - partition_key_range :param dict partition_key_range: Partition key range. - :param routing_range._Range r: query range.
+ :param routing_range.Range r: query range. :return: The subtract r - partition_key_range. - :rtype: routing_range._Range + :rtype: routing_range.Range """ - left = max(partition_key_range[routing_range._PartitionKeyRange.MaxExclusive], r.min) + left = max(partition_key_range[routing_range.PartitionKeyRange.MaxExclusive], r.min) if left == r.min: leftInclusive = r.isMinInclusive else: leftInclusive = False - queryRange = routing_range._Range(left, r.max, leftInclusive, + queryRange = routing_range.Range(left, r.max, leftInclusive, r.isMaxInclusive) return queryRange @@ -136,7 +136,7 @@ def get_overlapping_ranges(self, collection_link, sorted_ranges): :param str collection_link: The collection link. - :param (list of routing_range._Range) sorted_ranges: The sorted list of non-overlapping ranges. + :param (list of routing_range.Range) sorted_ranges: The sorted list of non-overlapping ranges. :return: List of partition key ranges. :rtype: list of dict @@ -163,11 +163,11 @@ def get_overlapping_ranges(self, collection_link, sorted_ranges): else: queryRange = currentProvidedRange - overlappingRanges = _PartitionKeyRangeCache.get_overlapping_ranges(self, collection_link, queryRange) + overlappingRanges = PartitionKeyRangeCache.get_overlapping_ranges(self, collection_link, queryRange) assert len(overlappingRanges), ("code bug: returned overlapping ranges for queryRange {} is empty".format(queryRange)) target_partition_key_ranges.extend(overlappingRanges) - lastKnownTargetRange = routing_range._Range.PartitionKeyRangeToRange(target_partition_key_ranges[-1]) + lastKnownTargetRange = routing_range.Range.PartitionKeyRangeToRange(target_partition_key_ranges[-1]) # the overlapping ranges must contain the requested range assert currentProvidedRange.max <= lastKnownTargetRange.max, "code bug: returned overlapping ranges {} does not contain the requested range {}".format(overlappingRanges, queryRange) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/routing/routing_range.py 
b/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/routing_range.py similarity index 89% rename from sdk/cosmos/azure-cosmos/azure/cosmos/routing/routing_range.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_routing/routing_range.py index f2ca0d76e148..fa0678f5b57c 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/routing/routing_range.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_routing/routing_range.py @@ -22,14 +22,14 @@ """Internal class for partition key range implementation in the Azure Cosmos database service. """ -class _PartitionKeyRange(object): +class PartitionKeyRange(object): """Partition Key Range Constants""" MinInclusive = 'minInclusive' MaxExclusive = 'maxExclusive' Id = 'id' Parents = 'parents' -class _Range(object): +class Range(object): """description of class""" MinPath = 'min' @@ -58,13 +58,13 @@ def contains(self, value): @classmethod def PartitionKeyRangeToRange(cls, partition_key_range): - self = cls(partition_key_range[_PartitionKeyRange.MinInclusive], partition_key_range[_PartitionKeyRange.MaxExclusive], + self = cls(partition_key_range[PartitionKeyRange.MinInclusive], partition_key_range[PartitionKeyRange.MaxExclusive], True, False) return self @classmethod def ParseFromDict(cls, range_as_dict): - self = cls(range_as_dict[_Range.MinPath], range_as_dict[_Range.MaxPath], range_as_dict[_Range.IsMinInclusivePath], range_as_dict[_Range.IsMaxInclusivePath]) + self = cls(range_as_dict[Range.MinPath], range_as_dict[Range.MaxPath], range_as_dict[Range.IsMinInclusivePath], range_as_dict[Range.IsMaxInclusivePath]) return self def isSingleValue(self): @@ -97,8 +97,8 @@ def overlaps(range1, range2): if range1 is None or range2 is None: return False if range1.isEmpty() or range2.isEmpty(): return False - cmp1 = _Range._compare_helper(range1.min, range2.max) - cmp2 = _Range._compare_helper(range2.min, range1.max) + cmp1 = Range._compare_helper(range1.min, range2.max) + cmp2 = Range._compare_helper(range2.min, range1.max) if (cmp1 <= 0 or cmp2 <= 0): 
if ((cmp1 == 0 and not(range1.isMinInclusive and range2.isMaxInclusive)) or (cmp2 == 0 and not(range2.isMinInclusive and range1.isMaxInclusive))): diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/runtime_constants.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_runtime_constants.py similarity index 100% rename from sdk/cosmos/azure-cosmos/azure/cosmos/runtime_constants.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_runtime_constants.py diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/session.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_session.py similarity index 95% rename from sdk/cosmos/azure-cosmos/azure/cosmos/session.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_session.py index 38eece56dec2..fe7c35cdf9d6 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/session.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_session.py @@ -25,9 +25,9 @@ import sys, traceback import threading -from . import base +from . import _base from . import http_constants -from .vector_session_token import VectorSessionToken +from ._vector_session_token import VectorSessionToken from .errors import HTTPFailure class SessionContainer(object): @@ -51,17 +51,17 @@ def get_session_token(self, resource_path): """ with self.session_lock: - is_name_based = base.IsNameBased(resource_path) + is_name_based = _base.IsNameBased(resource_path) collection_rid = '' session_token = '' try: if is_name_based: # get the collection name - collection_name = base.GetItemContainerLink(resource_path) + collection_name = _base.GetItemContainerLink(resource_path) collection_rid = self.collection_name_to_rid[collection_name] else: - collection_rid = base.GetItemContainerLink(resource_path) + collection_rid = _base.GetItemContainerLink(resource_path) if collection_rid in self.rid_to_session_token: token_dict = self.rid_to_session_token[collection_rid] @@ -112,7 +112,7 @@ def set_session_token(self, response_result, response_headers): response_result_id = response_result[response_result_id_key] else: return - 
collection_rid, collection_name = base.GetItemContainerInfo(self_link, alt_content_path, response_result_id) + collection_rid, collection_name = _base.GetItemContainerInfo(self_link, alt_content_path, response_result_id) except ValueError: return diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/session_retry_policy.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_session_retry_policy.py similarity index 100% rename from sdk/cosmos/azure-cosmos/azure/cosmos/session_retry_policy.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_session_retry_policy.py diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/synchronized_request.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_synchronized_request.py similarity index 98% rename from sdk/cosmos/azure-cosmos/azure/cosmos/synchronized_request.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_synchronized_request.py index 9538a993580c..2ba8c1b2cdd8 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/synchronized_request.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_synchronized_request.py @@ -30,7 +30,7 @@ from . import documents from . import errors from . import http_constants -from . import retry_utility +from . import _retry_utility def _IsReadableStream(obj): """Checks whether obj is a file-like readable stream. 
@@ -224,5 +224,5 @@ def SynchronizedRequest(client, request_options['headers'][http_constants.HttpHeaders.ContentLength] = 0 # Pass _Request function with it's parameters to retry_utility's Execute method that wraps the call with retries - return retry_utility._Execute(client, global_endpoint_manager, _Request, request, connection_policy, requests_session, path, request_options, request_body) + return _retry_utility.Execute(client, global_endpoint_manager, _Request, request, connection_policy, requests_session, path, request_options, request_body) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/utils.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_utils.py similarity index 86% rename from sdk/cosmos/azure-cosmos/azure/cosmos/utils.py rename to sdk/cosmos/azure-cosmos/azure/cosmos/_utils.py index e20216199ef5..8178ef426084 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/utils.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_utils.py @@ -26,17 +26,17 @@ import re as re from . import http_constants -def _get_user_agent(): - os_name = _safe_user_agent_header(platform.system()) - os_version = _safe_user_agent_header(platform.release()) - python_version = _safe_user_agent_header(platform.python_version()) +def get_user_agent(): + os_name = safe_user_agent_header(platform.system()) + os_version = safe_user_agent_header(platform.release()) + python_version = safe_user_agent_header(platform.python_version()) user_agent = "{}/{} Python/{} {}/{}".format(os_name, os_version, python_version, http_constants.Versions.SDKName, http_constants.Versions.SDKVersion) return user_agent -def _safe_user_agent_header(s): +def safe_user_agent_header(s): if s is None: s = "unknown" # remove all white spaces diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/vector_session_token.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_vector_session_token.py similarity index 99% rename from sdk/cosmos/azure-cosmos/azure/cosmos/vector_session_token.py rename to 
sdk/cosmos/azure-cosmos/azure/cosmos/_vector_session_token.py index 1dfee1974e6b..d2cd6920fc1c 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/vector_session_token.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_vector_session_token.py @@ -23,7 +23,6 @@ """ from . import errors -from . import base from .http_constants import StatusCodes class VectorSessionToken(object): diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py index 8326d5224260..7d2b9fd35795 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py @@ -23,12 +23,12 @@ """ import six -from .cosmos_client_connection import CosmosClientConnection +from ._cosmos_client_connection import CosmosClientConnection from .errors import HTTPFailure from .http_constants import StatusCodes from .offer import Offer from .scripts import Scripts -from .query_iterable import QueryIterable +from ._query_iterable import QueryIterable from .partition_key import NonePartitionKeyValue from typing import ( Any, @@ -222,7 +222,7 @@ def read_all_items( :param populate_query_metrics: Enable returning query metrics in response headers. :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :returns: A :class:`QueryIterable` instance representing an iterable of items (dicts). + :returns: An Iterable of items (dicts). """ if not feed_options: feed_options = {} # type: Dict[str, Any] @@ -264,7 +264,7 @@ def query_items_change_feed( :param max_item_count: Max number of items to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :returns: A :class:`QueryIterable` instance representing an iterable of items (dicts). + :returns: An Iterable of items (dicts). 
""" if not feed_options: @@ -317,7 +317,7 @@ def query_items( :param populate_query_metrics: Enable returning query metrics in response headers. :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :returns: A :class:`QueryIterable` instance representing an iterable of items (dicts). + :returns: An Iterable of items (dicts). You can use any value for the container name in the FROM clause, but typically the container name is used. In the examples below, the container name is "products," and is aliased as "p" for easier referencing @@ -675,7 +675,7 @@ def read_all_conflicts( :param max_item_count: Max number of items to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :returns: A :class:`QueryIterable` instance representing an iterable of conflicts (dicts). + :returns: An Iterable of conflicts (dicts). """ if not feed_options: @@ -712,7 +712,7 @@ def query_conflicts( :param max_item_count: Max number of items to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :returns: A :class:`QueryIterable` instance representing an iterable of conflicts (dicts). + :returns: An Iterable of conflicts (dicts). 
""" if not feed_options: diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py index f1de1976f98a..a4d5ff828734 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py @@ -23,10 +23,10 @@ """ import six -from .cosmos_client_connection import CosmosClientConnection +from ._cosmos_client_connection import CosmosClientConnection from .database import Database from .documents import ConnectionPolicy, DatabaseAccount -from .query_iterable import QueryIterable +from ._query_iterable import QueryIterable from typing import ( Any, Callable, @@ -184,7 +184,7 @@ def read_all_databases( :param populate_query_metrics: Enable returning query metrics in response headers. :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :returns: A :class:`QueryIterable` instance representing an iterable of database properties (dicts). + :returns: An Iterable of database properties (dicts). """ if not feed_options: @@ -231,7 +231,7 @@ def query_databases( :param populate_query_metrics: Enable returning query metrics in response headers. :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :returns: A :class:`QueryIterable` instance representing an iterable of database properties (dicts). + :returns: An Iterable of database properties (dicts). 
""" if not feed_options: diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/database.py b/sdk/cosmos/azure-cosmos/azure/cosmos/database.py index 68ef288106e5..21df096ca050 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/database.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/database.py @@ -23,13 +23,13 @@ """ import six -from .cosmos_client_connection import CosmosClientConnection +from ._cosmos_client_connection import CosmosClientConnection from .container import Container from .offer import Offer from .http_constants import StatusCodes from .errors import HTTPFailure from .user import User -from .query_iterable import QueryIterable +from ._query_iterable import QueryIterable from .partition_key import PartitionKey from typing import ( @@ -332,7 +332,7 @@ def read_all_containers( :param populate_query_metrics: Enable returning query metrics in response headers. :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :returns: A :class:`QueryIterable` instance representing an iterable of container properties (dicts). + :returns: An Iterable of container properties (dicts). .. literalinclude:: ../../examples/examples.py :start-after: [START list_containers] @@ -385,7 +385,7 @@ def query_containers( :param populate_query_metrics: Enable returning query metrics in response headers. :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :returns: A :class:`QueryIterable` instance representing an iterable of container properties (dicts). + :returns: An Iterable of container properties (dicts). """ if not feed_options: @@ -503,7 +503,7 @@ def read_all_users( :param max_item_count: Max number of users to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. 
:param response_hook: a callable invoked with the response metadata - :returns: A :class:`QueryIterable` instance representing an iterable of user properties (dicts). + :returns: An Iterable of user properties (dicts). """ if not feed_options: @@ -536,7 +536,7 @@ def query_users( :param max_item_count: Max number of users to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :returns: A :class:`QueryIterable` instance representing an iterable of user properties (dicts). + :returns: An Iterable of user properties (dicts). """ if not feed_options: diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py b/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py index fc45f5157dfc..33be86d9004f 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/documents.py @@ -22,7 +22,7 @@ """AzureDocument classes and enums for the Azure Cosmos database service. """ -from . import retry_options +from . import _retry_options class DatabaseAccount(object): """Database account. A DatabaseAccount is the container for databases. 
@@ -368,7 +368,7 @@ def __init__(self): self.ProxyConfiguration = None self.EnableEndpointDiscovery = True self.PreferredLocations = [] - self.RetryOptions = retry_options.RetryOptions() + self.RetryOptions = _retry_options.RetryOptions() self.DisableSSLVerification = False self.UseMultipleWriteLocations = False diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/scripts.py b/sdk/cosmos/azure-cosmos/azure/cosmos/scripts.py index 7487c36a4d3b..52f9ccfdd6e3 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/scripts.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/scripts.py @@ -23,9 +23,9 @@ """ import six -from azure.cosmos.cosmos_client_connection import CosmosClientConnection +from azure.cosmos._cosmos_client_connection import CosmosClientConnection from .partition_key import NonePartitionKeyValue -from.query_iterable import QueryIterable +from ._query_iterable import QueryIterable from typing import ( Any, List, @@ -64,7 +64,7 @@ def list_stored_procedures( :param max_item_count: Max number of items to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. - :returns: A :class:`QueryIterable` instance representing an iterable of stored procedures (dicts). + :returns: An Iterable of stored procedures (dicts). """ if not feed_options: @@ -91,7 +91,7 @@ def query_stored_procedures( :param parameters: Optional array of parameters to the query. Ignored if no query is provided. :param max_item_count: Max number of items to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. - :returns: A :class:`QueryIterable` instance representing an iterable of stored procedures (dicts). + :returns: An Iterable of stored procedures (dicts). """ if not feed_options: @@ -246,7 +246,7 @@ def list_triggers( :param max_item_count: Max number of items to be returned in the enumeration operation. 
:param feed_options: Dictionary of additional properties to be used for the request. - :returns: A :class:`QueryIterable` instance representing an iterable of triggers (dicts). + :returns: An Iterable of triggers (dicts). """ if not feed_options: @@ -273,7 +273,7 @@ def query_triggers( :param parameters: Optional array of parameters to the query. Ignored if no query is provided. :param max_item_count: Max number of items to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. - :returns: A :class:`QueryIterable` instance representing an iterable of triggers (dicts). + :returns: An Iterable of triggers (dicts). """ if not feed_options: @@ -394,7 +394,7 @@ def list_user_defined_functions( :param max_item_count: Max number of items to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. - :returns: A :class:`QueryIterable` instance representing an iterable of user defined functions (dicts). + :returns: An Iterable of user defined functions (dicts). """ if not feed_options: @@ -421,7 +421,7 @@ def query_user_defined_functions( :param parameters: Optional array of parameters to the query. Ignored if no query is provided. :param max_item_count: Max number of items to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. - :returns: A :class:`QueryIterable` instance representing an iterable of user defined functions (dicts). + :returns: An Iterable of user defined functions (dicts). 
""" if not feed_options: diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/user.py b/sdk/cosmos/azure-cosmos/azure/cosmos/user.py index f7c212c56c27..9b1eaf39236b 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/user.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/user.py @@ -23,7 +23,7 @@ """ import six -from .cosmos_client_connection import CosmosClientConnection +from ._cosmos_client_connection import CosmosClientConnection from typing import ( Any, List, @@ -99,7 +99,7 @@ def read_all_permissions( :param max_item_count: Max number of permissions to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :returns: A :class:`QueryIterable` instance representing an iterable of permissions (dicts). + :returns: An Iterable of permissions (dicts). """ if not feed_options: @@ -133,7 +133,7 @@ def query_permissions( :param max_item_count: Max number of permissions to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :returns: A :class:`QueryIterable` instance representing an iterable of permissions (dicts). + :returns: An Iterable of permissions (dicts). 
""" if not feed_options: diff --git a/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/ConflictWorker.py b/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/ConflictWorker.py index 52f31029be52..a714278dc71d 100644 --- a/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/ConflictWorker.py +++ b/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/ConflictWorker.py @@ -4,7 +4,6 @@ import json import azure.cosmos.errors as errors from azure.cosmos.http_constants import StatusCodes -import azure.cosmos.base as base class ConflictWorker(object): def __init__(self, database_name, basic_collection_name, manual_collection_name, lww_collection_name, udp_collection_name): diff --git a/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/MultiMasterScenario.py b/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/MultiMasterScenario.py index 8b36c27ba0fe..8b97f0899838 100644 --- a/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/MultiMasterScenario.py +++ b/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/MultiMasterScenario.py @@ -3,7 +3,7 @@ from Worker import Worker from multiprocessing.pool import ThreadPool import azure.cosmos.documents as documents -import azure.cosmos.cosmos_client_connection as cosmos_client_connection +import azure.cosmos._cosmos_client_connection as cosmos_client_connection class MultiMasterScenario(object): def __init__(self): diff --git a/sdk/cosmos/azure-cosmos/test/base_unit_tests.py b/sdk/cosmos/azure-cosmos/test/base_unit_tests.py index 2dd41a646231..cd0082c5582e 100644 --- a/sdk/cosmos/azure-cosmos/test/base_unit_tests.py +++ b/sdk/cosmos/azure-cosmos/test/base_unit_tests.py @@ -1,6 +1,6 @@ import unittest import pytest -import azure.cosmos.base as base +import azure.cosmos._base as base pytestmark = pytest.mark.cosmosEmulator diff --git a/sdk/cosmos/azure-cosmos/test/crud_tests.py b/sdk/cosmos/azure-cosmos/test/crud_tests.py index 12f1d53df464..77a20f52709c 100644 --- a/sdk/cosmos/azure-cosmos/test/crud_tests.py 
+++ b/sdk/cosmos/azure-cosmos/test/crud_tests.py @@ -41,18 +41,18 @@ import urllib.parse as urllib import uuid import pytest -import azure.cosmos.consistent_hash_ring as consistent_hash_ring +from azure.cosmos import _consistent_hash_ring import azure.cosmos.documents as documents import azure.cosmos.errors as errors from azure.cosmos.http_constants import HttpHeaders, StatusCodes, SubStatusCodes -import azure.cosmos.murmur_hash as murmur_hash +import azure.cosmos._murmur_hash as _murmur_hash import test_config -import azure.cosmos.base as base +import azure.cosmos._base as base import azure.cosmos.cosmos_client as cosmos_client from azure.cosmos.diagnostics import RecordDiagnostics from azure.cosmos.partition_key import PartitionKey import conftest -import azure.cosmos.retry_utility as retry_utility +from azure.cosmos import _retry_utility pytestmark = pytest.mark.cosmosEmulator @@ -298,11 +298,11 @@ def test_partitioned_collection_partition_key_extraction(self): } } - self.OriginalExecuteFunction = retry_utility._ExecuteFunction - retry_utility._ExecuteFunction = self._MockExecuteFunction + self.OriginalExecuteFunction = _retry_utility.ExecuteFunction + _retry_utility.ExecuteFunction = self._MockExecuteFunction # create document without partition key being specified created_document = created_collection.create_item(body=document_definition) - retry_utility._ExecuteFunction = self.OriginalExecuteFunction + _retry_utility.ExecuteFunction = self.OriginalExecuteFunction self.assertEquals(self.last_headers[1], '["WA"]') del self.last_headers[:] @@ -315,11 +315,11 @@ def test_partitioned_collection_partition_key_extraction(self): partition_key=PartitionKey(path='/address', kind=documents.PartitionKind.Hash) ) - self.OriginalExecuteFunction = retry_utility._ExecuteFunction - retry_utility._ExecuteFunction = self._MockExecuteFunction + self.OriginalExecuteFunction = _retry_utility.ExecuteFunction + _retry_utility.ExecuteFunction = self._MockExecuteFunction # Create 
document with partitionkey not present as a leaf level property but a dict created_document = created_collection1.create_item(document_definition) - retry_utility._ExecuteFunction = self.OriginalExecuteFunction + _retry_utility.ExecuteFunction = self.OriginalExecuteFunction self.assertEquals(self.last_headers[1], [{}]) del self.last_headers[:] @@ -331,11 +331,11 @@ def test_partitioned_collection_partition_key_extraction(self): partition_key=PartitionKey(path='/address/state/city', kind=documents.PartitionKind.Hash) ) - self.OriginalExecuteFunction = retry_utility._ExecuteFunction - retry_utility._ExecuteFunction = self._MockExecuteFunction + self.OriginalExecuteFunction = _retry_utility.ExecuteFunction + _retry_utility.ExecuteFunction = self._MockExecuteFunction # Create document with partitionkey not present in the document created_document = created_collection2.create_item(document_definition) - retry_utility._ExecuteFunction = self.OriginalExecuteFunction + _retry_utility.ExecuteFunction = self.OriginalExecuteFunction self.assertEquals(self.last_headers[1], [{}]) del self.last_headers[:] @@ -358,10 +358,10 @@ def test_partitioned_collection_partition_key_extraction_special_chars(self): "level' 1*()": {"le/vel2": 'val1'} } - self.OriginalExecuteFunction = retry_utility._ExecuteFunction - retry_utility._ExecuteFunction = self._MockExecuteFunction + self.OriginalExecuteFunction = _retry_utility.ExecuteFunction + _retry_utility.ExecuteFunction = self._MockExecuteFunction created_document = created_collection1.create_item(body=document_definition) - retry_utility._ExecuteFunction = self.OriginalExecuteFunction + _retry_utility.ExecuteFunction = self.OriginalExecuteFunction self.assertEquals(self.last_headers[1], '["val1"]') del self.last_headers[:] @@ -385,11 +385,11 @@ def test_partitioned_collection_partition_key_extraction_special_chars(self): 'level\" 1*()': {'le/vel2': 'val2'} } - self.OriginalExecuteFunction = retry_utility._ExecuteFunction - 
retry_utility._ExecuteFunction = self._MockExecuteFunction + self.OriginalExecuteFunction = _retry_utility.ExecuteFunction + _retry_utility.ExecuteFunction = self._MockExecuteFunction # create document without partition key being specified created_document = created_collection2.create_item(body=document_definition) - retry_utility._ExecuteFunction = self.OriginalExecuteFunction + _retry_utility.ExecuteFunction = self.OriginalExecuteFunction self.assertEquals(self.last_headers[1], '["val2"]') del self.last_headers[:] @@ -851,13 +851,13 @@ def test_murmur_hash(self): str = 'afdgdd' bytes = bytearray(str, encoding='utf-8') - hash_value = murmur_hash._MurmurHash._ComputeHash(bytes) + hash_value = _murmur_hash.MurmurHash._ComputeHash(bytes) self.assertEqual(1099701186, hash_value) num = 374.0 bytes = bytearray(pack('d', num)) - hash_value = murmur_hash._MurmurHash._ComputeHash(bytes) + hash_value = _murmur_hash.MurmurHash._ComputeHash(bytes) self.assertEqual(3717946798, hash_value) self._validate_bytes("", 0x1B873593, bytearray(b'\xEE\xA8\xA2\x67'), 1738713326) @@ -878,25 +878,25 @@ def test_murmur_hash(self): 3381504877) def _validate_bytes(self, str, seed, expected_hash_bytes, expected_value): - hash_value = murmur_hash._MurmurHash._ComputeHash(bytearray(str, encoding='utf-8'), seed) + hash_value = _murmur_hash.MurmurHash._ComputeHash(bytearray(str, encoding='utf-8'), seed) bytes = bytearray(pack('I', hash_value)) self.assertEqual(expected_value, hash_value) self.assertEqual(expected_hash_bytes, bytes) def test_get_bytes(self): - actual_bytes = consistent_hash_ring._ConsistentHashRing._GetBytes("documentdb") + actual_bytes = _consistent_hash_ring.ConsistentHashRing._GetBytes("documentdb") expected_bytes = bytearray(b'\x64\x6F\x63\x75\x6D\x65\x6E\x74\x64\x62') self.assertEqual(expected_bytes, actual_bytes) - actual_bytes = consistent_hash_ring._ConsistentHashRing._GetBytes("azure") + actual_bytes = _consistent_hash_ring.ConsistentHashRing._GetBytes("azure") 
expected_bytes = bytearray(b'\x61\x7A\x75\x72\x65') self.assertEqual(expected_bytes, actual_bytes) - actual_bytes = consistent_hash_ring._ConsistentHashRing._GetBytes("json") + actual_bytes = _consistent_hash_ring.ConsistentHashRing._GetBytes("json") expected_bytes = bytearray(b'\x6A\x73\x6F\x6E') self.assertEqual(expected_bytes, actual_bytes) - actual_bytes = consistent_hash_ring._ConsistentHashRing._GetBytes("nosql") + actual_bytes = _consistent_hash_ring.ConsistentHashRing._GetBytes("nosql") expected_bytes = bytearray(b'\x6E\x6F\x73\x71\x6C') self.assertEqual(expected_bytes, actual_bytes) diff --git a/sdk/cosmos/azure-cosmos/test/globaldb_mock_tests.py b/sdk/cosmos/azure-cosmos/test/globaldb_mock_tests.py index c387000390d9..5c07812625bf 100644 --- a/sdk/cosmos/azure-cosmos/test/globaldb_mock_tests.py +++ b/sdk/cosmos/azure-cosmos/test/globaldb_mock_tests.py @@ -23,13 +23,13 @@ import json import pytest -import azure.cosmos.cosmos_client_connection as cosmos_client_connection +import azure.cosmos._cosmos_client_connection as cosmos_client_connection import azure.cosmos.documents as documents import azure.cosmos.errors as errors -import azure.cosmos.constants as constants +import azure.cosmos._constants as constants from azure.cosmos.http_constants import StatusCodes -import azure.cosmos.global_endpoint_manager as global_endpoint_manager -import azure.cosmos.retry_utility as retry_utility +import azure.cosmos._global_endpoint_manager as global_endpoint_manager +from azure.cosmos import _retry_utility import test_config pytestmark = pytest.mark.cosmosEmulator @@ -132,7 +132,7 @@ def setUp(self): # Copying the original objects and functions before assigning the mock versions of them self.OriginalGetDatabaseAccountStub = global_endpoint_manager._GlobalEndpointManager._GetDatabaseAccountStub self.OriginalGlobalEndpointManager = global_endpoint_manager._GlobalEndpointManager - self.OriginalExecuteFunction = retry_utility._ExecuteFunction + self.OriginalExecuteFunction 
= _retry_utility.ExecuteFunction # Make azure-cosmos use the MockGlobalEndpointManager global_endpoint_manager._GlobalEndpointManager = MockGlobalEndpointManager @@ -141,13 +141,13 @@ def tearDown(self): # Restoring the original objects and functions global_endpoint_manager._GlobalEndpointManager = self.OriginalGlobalEndpointManager global_endpoint_manager._GlobalEndpointManager._GetDatabaseAccountStub = self.OriginalGetDatabaseAccountStub - retry_utility._ExecuteFunction = self.OriginalExecuteFunction + _retry_utility.ExecuteFunction = self.OriginalExecuteFunction def MockExecuteFunction(self, function, *args, **kwargs): global location_changed if self.endpoint_discovery_retry_count == 2: - retry_utility._ExecuteFunction = self.OriginalExecuteFunction + _retry_utility.ExecuteFunction = self.OriginalExecuteFunction return (json.dumps([{ 'id': 'mock database' }]), None) else: self.endpoint_discovery_retry_count += 1 @@ -158,8 +158,8 @@ def MockGetDatabaseAccountStub(self, endpoint): raise errors.HTTPFailure(StatusCodes.SERVICE_UNAVAILABLE, "Service unavailable") def MockCreateDatabase(self, client, database): - self.OriginalExecuteFunction = retry_utility._ExecuteFunction - retry_utility._ExecuteFunction = self.MockExecuteFunction + self.OriginalExecuteFunction = _retry_utility.ExecuteFunction + _retry_utility.ExecuteFunction = self.MockExecuteFunction client.CreateDatabase(database) def test_globaldb_endpoint_discovery_retry_policy(self): diff --git a/sdk/cosmos/azure-cosmos/test/globaldb_tests.py b/sdk/cosmos/azure-cosmos/test/globaldb_tests.py index 5241118dbfa7..e0e8eb41db7e 100644 --- a/sdk/cosmos/azure-cosmos/test/globaldb_tests.py +++ b/sdk/cosmos/azure-cosmos/test/globaldb_tests.py @@ -26,12 +26,12 @@ import time import pytest -import azure.cosmos.cosmos_client_connection as cosmos_client_connection +import azure.cosmos._cosmos_client_connection as cosmos_client_connection import azure.cosmos.documents as documents import azure.cosmos.errors as errors 
-import azure.cosmos.global_endpoint_manager as global_endpoint_manager -import azure.cosmos.endpoint_discovery_retry_policy as endpoint_discovery_retry_policy -import azure.cosmos.retry_utility as retry_utility +import azure.cosmos._global_endpoint_manager as global_endpoint_manager +from azure.cosmos import _endpoint_discovery_retry_policy +from azure.cosmos import _retry_utility from azure.cosmos.http_constants import HttpHeaders, StatusCodes, SubStatusCodes import test_config @@ -359,8 +359,8 @@ def test_globaldb_locational_endpoint_parser(self): def test_globaldb_endpoint_discovery_retry_policy_mock(self): client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}) - self.OriginalExecuteFunction = retry_utility._ExecuteFunction - retry_utility._ExecuteFunction = self._MockExecuteFunction + self.OriginalExecuteFunction = _retry_utility.ExecuteFunction + _retry_utility.ExecuteFunction = self._MockExecuteFunction self.OriginalGetDatabaseAccount = client.GetDatabaseAccount client.GetDatabaseAccount = self._MockGetDatabaseAccount @@ -368,8 +368,8 @@ def test_globaldb_endpoint_discovery_retry_policy_mock(self): max_retry_attempt_count = 10 retry_after_in_milliseconds = 500 - endpoint_discovery_retry_policy._EndpointDiscoveryRetryPolicy.Max_retry_attempt_count = max_retry_attempt_count - endpoint_discovery_retry_policy._EndpointDiscoveryRetryPolicy.Retry_after_in_milliseconds = retry_after_in_milliseconds + _endpoint_discovery_retry_policy.EndpointDiscoveryRetryPolicy.Max_retry_attempt_count = max_retry_attempt_count + _endpoint_discovery_retry_policy.EndpointDiscoveryRetryPolicy.Retry_after_in_milliseconds = retry_after_in_milliseconds document_definition = { 'id': 'doc', 'name': 'sample document', @@ -382,7 +382,7 @@ def test_globaldb_endpoint_discovery_retry_policy_mock(self): self.test_coll['_self'], document_definition) - retry_utility._ExecuteFunction = self.OriginalExecuteFunction + 
_retry_utility.ExecuteFunction = self.OriginalExecuteFunction def _MockExecuteFunction(self, function, *args, **kwargs): raise errors.HTTPFailure(StatusCodes.FORBIDDEN, "Write Forbidden", {'x-ms-substatus' : SubStatusCodes.WRITE_FORBIDDEN}) diff --git a/sdk/cosmos/azure-cosmos/test/location_cache_tests.py b/sdk/cosmos/azure-cosmos/test/location_cache_tests.py index 22e2689cded4..a2772ce270e8 100644 --- a/sdk/cosmos/azure-cosmos/test/location_cache_tests.py +++ b/sdk/cosmos/azure-cosmos/test/location_cache_tests.py @@ -4,13 +4,13 @@ from time import sleep from azure.cosmos.http_constants import ResourceType -import azure.cosmos.cosmos_client_connection as cosmos_client_connection +import azure.cosmos._cosmos_client_connection as cosmos_client_connection import azure.cosmos.documents as documents -from azure.cosmos.request_object import _RequestObject -from azure.cosmos.location_cache import LocationCache +from azure.cosmos._request_object import RequestObject +from azure.cosmos._location_cache import LocationCache import azure.cosmos.errors as errors from azure.cosmos.http_constants import StatusCodes, SubStatusCodes, HttpHeaders -import azure.cosmos.retry_utility as retry_utility +from azure.cosmos import _retry_utility import six pytestmark = pytest.mark.cosmosEmulator @@ -74,8 +74,8 @@ def test_validate_retry_on_session_not_availabe_with_disable_multiple_write_loca def validate_retry_on_session_not_availabe_with_endpoint_discovery_disabled(self, is_preferred_locations_list_empty, use_multiple_write_locations, is_read_request): self.counter = 0 - self.OriginalExecuteFunction = retry_utility._ExecuteFunction - retry_utility._ExecuteFunction = self._MockExecuteFunctionSessionReadFailureOnce + self.OriginalExecuteFunction = _retry_utility.ExecuteFunction + _retry_utility.ExecuteFunction = self._MockExecuteFunctionSessionReadFailureOnce self.original_get_database_account = cosmos_client_connection.CosmosClientConnection.GetDatabaseAccount 
cosmos_client_connection.CosmosClientConnection.GetDatabaseAccount = self.mock_create_db_with_flag_enabled if use_multiple_write_locations else self.mock_create_db_with_flag_disabled enable_endpoint_discovery = False @@ -95,7 +95,7 @@ def validate_retry_on_session_not_availabe_with_endpoint_discovery_disabled(self self.assertEqual(e.sub_status, SubStatusCodes.READ_SESSION_NOTAVAILABLE) cosmos_client_connection.CosmosClientConnection.GetDatabaseAccount = self.original_get_database_account - retry_utility._ExecuteFunction = self.OriginalExecuteFunction + _retry_utility.ExecuteFunction = self.OriginalExecuteFunction def _MockExecuteFunctionSessionReadFailureOnce(self, function, *args, **kwargs): self.counter += 1 @@ -116,8 +116,8 @@ def test_validate_retry_on_session_not_availabe_with_endpoint_discovery_enabled( def validate_retry_on_session_not_availabe(self, is_preferred_locations_list_empty, use_multiple_write_locations): self.counter = 0 - self.OriginalExecuteFunction = retry_utility._ExecuteFunction - retry_utility._ExecuteFunction = self._MockExecuteFunctionSessionReadFailureTwice + self.OriginalExecuteFunction = _retry_utility.ExecuteFunction + _retry_utility.ExecuteFunction = self._MockExecuteFunctionSessionReadFailureTwice self.original_get_database_account = cosmos_client_connection.CosmosClientConnection.GetDatabaseAccount cosmos_client_connection.CosmosClientConnection.GetDatabaseAccount = self.mock_create_db_with_flag_enabled if use_multiple_write_locations else self.mock_create_db_with_flag_disabled @@ -136,7 +136,7 @@ def validate_retry_on_session_not_availabe(self, is_preferred_locations_list_emp self.assertEqual(e.sub_status, SubStatusCodes.READ_SESSION_NOTAVAILABLE) cosmos_client_connection.CosmosClientConnection.GetDatabaseAccount = self.original_get_database_account - retry_utility._ExecuteFunction = self.OriginalExecuteFunction + _retry_utility.ExecuteFunction = self.OriginalExecuteFunction def _MockExecuteFunctionSessionReadFailureTwice(self, 
function, *args, **kwargs): request = args[1] @@ -376,11 +376,11 @@ def validate_request_endpoint_resolution(self, use_multiple_write_locations, end def resolve_endpoint_for_read_request(self, master_resource_type): operation_type = documents._OperationType.Read resource_type = ResourceType.Database if master_resource_type else ResourceType.Document - request = _RequestObject(resource_type, operation_type) + request = RequestObject(resource_type, operation_type) return self.location_cache.resolve_service_endpoint(request) def resolve_endpoint_for_write_request(self, resource_type, use_alternate_write_endpoint): operation_type = documents._OperationType.Create - request = _RequestObject(resource_type, operation_type) + request = RequestObject(resource_type, operation_type) request.route_to_location_with_preferred_location_flag(1 if use_alternate_write_endpoint else 0, ResourceType.IsCollectionChild(resource_type)) return self.location_cache.resolve_service_endpoint(request) diff --git a/sdk/cosmos/azure-cosmos/test/multimaster_tests.py b/sdk/cosmos/azure-cosmos/test/multimaster_tests.py index 8c0b5b24fa1a..11c740244dcc 100644 --- a/sdk/cosmos/azure-cosmos/test/multimaster_tests.py +++ b/sdk/cosmos/azure-cosmos/test/multimaster_tests.py @@ -2,9 +2,9 @@ import uuid import azure.cosmos.cosmos_client as cosmos_client import pytest -import azure.cosmos.constants as constants +import azure.cosmos._constants as constants from azure.cosmos.http_constants import HttpHeaders -import azure.cosmos.retry_utility as retry_utility +from azure.cosmos import _retry_utility import test_config from azure.cosmos.partition_key import PartitionKey @@ -31,8 +31,8 @@ def test_tentative_writes_header_not_present(self): def _validate_tentative_write_headers(self): - self.OriginalExecuteFunction = retry_utility._ExecuteFunction - retry_utility._ExecuteFunction = self._MockExecuteFunction + self.OriginalExecuteFunction = _retry_utility.ExecuteFunction + _retry_utility.ExecuteFunction = 
self._MockExecuteFunction connectionPolicy = MultiMasterTests.connectionPolicy connectionPolicy.UseMultipleWriteLocations = True @@ -115,7 +115,7 @@ def _validate_tentative_write_headers(self): # Delete Database self.assertEqual(self.last_headers[10], is_allow_tentative_writes_set) - retry_utility._ExecuteFunction = self.OriginalExecuteFunction + _retry_utility.ExecuteFunction = self.OriginalExecuteFunction def _MockExecuteFunction(self, function, *args, **kwargs): self.counter += 1 diff --git a/sdk/cosmos/azure-cosmos/test/orderby_tests.py b/sdk/cosmos/azure-cosmos/test/orderby_tests.py index d8f85a49eb03..7b84ed856aaa 100644 --- a/sdk/cosmos/azure-cosmos/test/orderby_tests.py +++ b/sdk/cosmos/azure-cosmos/test/orderby_tests.py @@ -25,8 +25,8 @@ import azure.cosmos.documents as documents from azure.cosmos.partition_key import PartitionKey import azure.cosmos.cosmos_client as cosmos_client -from azure.cosmos import query_iterable -import azure.cosmos.base as base +from azure.cosmos import _query_iterable as query_iterable +import azure.cosmos._base as base from six.moves import xrange import test_config diff --git a/sdk/cosmos/azure-cosmos/test/proxy_tests.py b/sdk/cosmos/azure-cosmos/test/proxy_tests.py index c9144b8bec9e..552f76ee301c 100644 --- a/sdk/cosmos/azure-cosmos/test/proxy_tests.py +++ b/sdk/cosmos/azure-cosmos/test/proxy_tests.py @@ -22,7 +22,7 @@ import unittest import pytest import azure.cosmos.documents as documents -import azure.cosmos.cosmos_client_connection as cosmos_client_connection +import azure.cosmos._cosmos_client_connection as cosmos_client_connection import test_config import six if six.PY2: diff --git a/sdk/cosmos/azure-cosmos/test/query_execution_context_tests.py b/sdk/cosmos/azure-cosmos/test/query_execution_context_tests.py index 4a0ac5692459..f8138b6d5a8a 100644 --- a/sdk/cosmos/azure-cosmos/test/query_execution_context_tests.py +++ b/sdk/cosmos/azure-cosmos/test/query_execution_context_tests.py @@ -24,8 +24,8 @@ import pytest from 
six.moves import xrange import azure.cosmos.cosmos_client as cosmos_client -from azure.cosmos.execution_context import base_execution_context as base_execution_context -import azure.cosmos.base as base +from azure.cosmos._execution_context import base_execution_context as base_execution_context +import azure.cosmos._base as base import test_config from azure.cosmos.partition_key import PartitionKey diff --git a/sdk/cosmos/azure-cosmos/test/retry_policy_tests.py b/sdk/cosmos/azure-cosmos/test/retry_policy_tests.py index bffe7a194d0e..f25dfce77e78 100644 --- a/sdk/cosmos/azure-cosmos/test/retry_policy_tests.py +++ b/sdk/cosmos/azure-cosmos/test/retry_policy_tests.py @@ -25,9 +25,9 @@ import pytest import azure.cosmos.documents as documents import azure.cosmos.errors as errors -import azure.cosmos.retry_options as retry_options +import azure.cosmos._retry_options as retry_options from azure.cosmos.http_constants import HttpHeaders, StatusCodes, SubStatusCodes -import azure.cosmos.retry_utility as retry_utility +from azure.cosmos import _retry_utility import test_config pytestmark = pytest.mark.cosmosEmulator @@ -78,9 +78,9 @@ def test_resource_throttle_retry_policy_default_retry_after(self): connection_policy = Test_retry_policy_tests.connectionPolicy connection_policy.RetryOptions = retry_options.RetryOptions(5) - self.OriginalExecuteFunction = retry_utility._ExecuteFunction + self.OriginalExecuteFunction = _retry_utility.ExecuteFunction try: - retry_utility._ExecuteFunction = self._MockExecuteFunction + _retry_utility.ExecuteFunction = self._MockExecuteFunction document_definition = { 'id': 'doc', 'name': 'sample document', @@ -94,15 +94,15 @@ def test_resource_throttle_retry_policy_default_retry_after(self): self.assertGreaterEqual( self.created_collection.client_connection.last_response_headers[HttpHeaders.ThrottleRetryWaitTimeInMs], connection_policy.RetryOptions.MaxRetryAttemptCount * self.retry_after_in_milliseconds) finally: - retry_utility._ExecuteFunction = 
self.OriginalExecuteFunction + _retry_utility.ExecuteFunction = self.OriginalExecuteFunction def test_resource_throttle_retry_policy_fixed_retry_after(self): connection_policy = Test_retry_policy_tests.connectionPolicy connection_policy.RetryOptions = retry_options.RetryOptions(5, 2000) - self.OriginalExecuteFunction = retry_utility._ExecuteFunction + self.OriginalExecuteFunction = _retry_utility.ExecuteFunction try: - retry_utility._ExecuteFunction = self._MockExecuteFunction + _retry_utility.ExecuteFunction = self._MockExecuteFunction document_definition = { 'id': 'doc', 'name': 'sample document', @@ -117,15 +117,15 @@ def test_resource_throttle_retry_policy_fixed_retry_after(self): connection_policy.RetryOptions.MaxRetryAttemptCount * connection_policy.RetryOptions.FixedRetryIntervalInMilliseconds) finally: - retry_utility._ExecuteFunction = self.OriginalExecuteFunction + _retry_utility.ExecuteFunction = self.OriginalExecuteFunction def test_resource_throttle_retry_policy_max_wait_time(self): connection_policy = Test_retry_policy_tests.connectionPolicy connection_policy.RetryOptions = retry_options.RetryOptions(5, 2000, 3) - self.OriginalExecuteFunction = retry_utility._ExecuteFunction + self.OriginalExecuteFunction = _retry_utility.ExecuteFunction try: - retry_utility._ExecuteFunction = self._MockExecuteFunction + _retry_utility.ExecuteFunction = self._MockExecuteFunction document_definition = { 'id': 'doc', 'name': 'sample document', @@ -138,7 +138,7 @@ def test_resource_throttle_retry_policy_max_wait_time(self): self.assertGreaterEqual(self.created_collection.client_connection.last_response_headers[HttpHeaders.ThrottleRetryWaitTimeInMs], connection_policy.RetryOptions.MaxWaitTimeInSeconds * 1000) finally: - retry_utility._ExecuteFunction = self.OriginalExecuteFunction + _retry_utility.ExecuteFunction = self.OriginalExecuteFunction def test_resource_throttle_retry_policy_query(self): connection_policy = Test_retry_policy_tests.connectionPolicy @@ -150,9 +150,9 
@@ def test_resource_throttle_retry_policy_query(self): self.created_collection.create_item(body=document_definition) - self.OriginalExecuteFunction = retry_utility._ExecuteFunction + self.OriginalExecuteFunction = _retry_utility.ExecuteFunction try: - retry_utility._ExecuteFunction = self._MockExecuteFunction + _retry_utility.ExecuteFunction = self._MockExecuteFunction try: list(self.created_collection.query_items( @@ -169,7 +169,7 @@ def test_resource_throttle_retry_policy_query(self): self.assertGreaterEqual(self.created_collection.client_connection.last_response_headers[HttpHeaders.ThrottleRetryWaitTimeInMs], connection_policy.RetryOptions.MaxRetryAttemptCount * self.retry_after_in_milliseconds) finally: - retry_utility._ExecuteFunction = self.OriginalExecuteFunction + _retry_utility.ExecuteFunction = self.OriginalExecuteFunction def test_default_retry_policy_for_query(self): document_definition_1 = { 'id': 'doc1', @@ -183,9 +183,9 @@ def test_default_retry_policy_for_query(self): self.created_collection.create_item(body=document_definition_2) try: - original_execute_function = retry_utility._ExecuteFunction + original_execute_function = _retry_utility.ExecuteFunction mf = self.MockExecuteFunctionConnectionReset(original_execute_function) - retry_utility._ExecuteFunction = mf + _retry_utility.ExecuteFunction = mf docs = self.created_collection.query_items(query="Select * from c", max_item_count=1, enable_cross_partition_query=True) @@ -194,7 +194,7 @@ def test_default_retry_policy_for_query(self): self.assertEqual(result_docs[1]['id'], 'doc2') self.assertEqual(mf.counter, 12) finally: - retry_utility._ExecuteFunction = original_execute_function + _retry_utility.ExecuteFunction = original_execute_function self.created_collection.delete_item(item=result_docs[0], partition_key=result_docs[0]['id']) self.created_collection.delete_item(item=result_docs[1], partition_key=result_docs[1]['id']) @@ -207,16 +207,16 @@ def test_default_retry_policy_for_read(self): 
created_document = self.created_collection.create_item(body=document_definition) try: - original_execute_function = retry_utility._ExecuteFunction + original_execute_function = _retry_utility.ExecuteFunction mf = self.MockExecuteFunctionConnectionReset(original_execute_function) - retry_utility._ExecuteFunction = mf + _retry_utility.ExecuteFunction = mf doc = self.created_collection.read_item(item=created_document['id'], partition_key=created_document['id']) self.assertEqual(doc['id'], 'doc') self.assertEqual(mf.counter, 3) finally: - retry_utility._ExecuteFunction = original_execute_function + _retry_utility.ExecuteFunction = original_execute_function self.created_collection.delete_item(item=created_document, partition_key=created_document['id']) @@ -226,9 +226,9 @@ def test_default_retry_policy_for_create(self): 'key': 'value'} try: - original_execute_function = retry_utility._ExecuteFunction + original_execute_function = _retry_utility.ExecuteFunction mf = self.MockExecuteFunctionConnectionReset(original_execute_function) - retry_utility._ExecuteFunction = mf + _retry_utility.ExecuteFunction = mf created_document = {} try : @@ -241,7 +241,7 @@ def test_default_retry_policy_for_create(self): # 3 retries for readCollection. No retry for createDocument. self.assertEqual(mf.counter, 1) # TODO: The comment above implies that there should be a read in the test. But there isn't... 
finally: - retry_utility._ExecuteFunction = original_execute_function + _retry_utility.ExecuteFunction = original_execute_function def _MockExecuteFunction(self, function, *args, **kwargs): raise errors.HTTPFailure(StatusCodes.TOO_MANY_REQUESTS, "Request rate is too large", {HttpHeaders.RetryAfterInMilliseconds: self.retry_after_in_milliseconds}) diff --git a/sdk/cosmos/azure-cosmos/test/routing/collection_routing_map_test.py b/sdk/cosmos/azure-cosmos/test/routing/collection_routing_map_test.py index ae5fa2c011af..c6cae9afd4ce 100644 --- a/sdk/cosmos/azure-cosmos/test/routing/collection_routing_map_test.py +++ b/sdk/cosmos/azure-cosmos/test/routing/collection_routing_map_test.py @@ -21,9 +21,9 @@ import unittest import pytest -from azure.cosmos.routing.collection_routing_map import _CollectionRoutingMap -import azure.cosmos.routing.routing_range as routing_range -from azure.cosmos.routing.routing_map_provider import _PartitionKeyRangeCache +from azure.cosmos._routing.collection_routing_map import CollectionRoutingMap +import azure.cosmos._routing.routing_range as routing_range +from azure.cosmos._routing.routing_map_provider import PartitionKeyRangeCache pytestmark = pytest.mark.cosmosEmulator @@ -34,8 +34,8 @@ def test_advanced(self): partition_key_ranges = [{u'id': u'0', u'minInclusive': u'', u'maxExclusive': u'05C1C9CD673398'}, {u'id': u'1', u'minInclusive': u'05C1C9CD673398', u'maxExclusive': u'05C1D9CD673398'}, {u'id': u'2', u'minInclusive': u'05C1D9CD673398', u'maxExclusive': u'05C1E399CD6732'}, {u'id': u'3', u'minInclusive': u'05C1E399CD6732', u'maxExclusive': u'05C1E9CD673398'}, {u'id': u'4', u'minInclusive': u'05C1E9CD673398', u'maxExclusive': u'FF'}] partitionRangeWithInfo = [(r, True) for r in partition_key_ranges] - pkRange = routing_range._Range("", "FF", True, False) - collection_routing_map = _CollectionRoutingMap.CompleteRoutingMap(partitionRangeWithInfo, 'sample collection id') + pkRange = routing_range.Range("", "FF", True, False) + 
collection_routing_map = CollectionRoutingMap.CompleteRoutingMap(partitionRangeWithInfo, 'sample collection id') overlapping_partition_key_ranges = collection_routing_map.get_overlapping_ranges(pkRange) self.assertEqual(len(overlapping_partition_key_ranges), len(partition_key_ranges)) @@ -74,7 +74,7 @@ def get_range_id(r): return r[Id] # verify no thing is filtered out since there is no children ranges - filteredRanges = _PartitionKeyRangeCache._discard_parent_ranges(partitionKeyRanges) + filteredRanges = PartitionKeyRangeCache._discard_parent_ranges(partitionKeyRanges) self.assertEqual(['2', '0', '1', '3'], list(map(get_range_id, filteredRanges))) # add some children partition key ranges with parents Ids @@ -93,7 +93,7 @@ def get_range_id(r): Parents : ["0"]}) # verify the filtered range list has children ranges and the parent Ids are discarded - filteredRanges = _PartitionKeyRangeCache._discard_parent_ranges(partitionKeyRanges) + filteredRanges = PartitionKeyRangeCache._discard_parent_ranges(partitionKeyRanges) expectedRanges = ['2', '1', '3', '6', '7', '5'] self.assertEqual(expectedRanges, list(map(get_range_id, filteredRanges))) @@ -123,7 +123,7 @@ def test_collection_routing_map(self): 3) ] - crm = _CollectionRoutingMap.CompleteRoutingMap(partitionKeyRanges, "") + crm = CollectionRoutingMap.CompleteRoutingMap(partitionKeyRanges, "") self.assertEqual("0", crm._orderedPartitionKeyRanges[0][Id]) self.assertEqual("1", crm._orderedPartitionKeyRanges[1][Id]) @@ -144,7 +144,7 @@ def test_collection_routing_map(self): self.assertEqual("0", crm.get_range_by_partition_key_range_id("0")[Id]) self.assertEqual("1", crm.get_range_by_partition_key_range_id("1")[Id]) - fullRangeMinToMaxRange = routing_range._Range(_CollectionRoutingMap.MinimumInclusiveEffectivePartitionKey, _CollectionRoutingMap.MaximumExclusiveEffectivePartitionKey, True, False) + fullRangeMinToMaxRange = routing_range.Range(CollectionRoutingMap.MinimumInclusiveEffectivePartitionKey, 
CollectionRoutingMap.MaximumExclusiveEffectivePartitionKey, True, False) overlappingRanges = crm.get_overlapping_ranges([fullRangeMinToMaxRange]) self.assertEqual(4, len(overlappingRanges)) @@ -154,18 +154,18 @@ def getKey(r): onlyParitionRanges.sort(key = getKey) self.assertEqual(overlappingRanges, onlyParitionRanges) - noPoint = routing_range._Range(_CollectionRoutingMap.MinimumInclusiveEffectivePartitionKey, _CollectionRoutingMap.MinimumInclusiveEffectivePartitionKey, False, False) + noPoint = routing_range.Range(CollectionRoutingMap.MinimumInclusiveEffectivePartitionKey, CollectionRoutingMap.MinimumInclusiveEffectivePartitionKey, False, False) self.assertEqual(0, len(crm.get_overlapping_ranges([noPoint]))) - onePoint = routing_range._Range("0000000040", "0000000040", True, True) + onePoint = routing_range.Range("0000000040", "0000000040", True, True) overlappingPartitionKeyRanges = crm.get_overlapping_ranges([onePoint]) self.assertEqual(1, len(overlappingPartitionKeyRanges)) self.assertEqual("1", overlappingPartitionKeyRanges[0][Id]) ranges = [ - routing_range._Range("0000000040", "0000000045", True, True), - routing_range._Range("0000000045", "0000000046", True, True), - routing_range._Range("0000000046", "0000000050", True, True) + routing_range.Range("0000000040", "0000000045", True, True), + routing_range.Range("0000000045", "0000000046", True, True), + routing_range.Range("0000000046", "0000000050", True, True) ] overlappingPartitionKeyRanges = crm.get_overlapping_ranges(ranges) @@ -182,12 +182,12 @@ def test_invalid_routing_map(self): collectionUniqueId = "" def createRoutingMap(): - _CollectionRoutingMap.CompleteRoutingMap(partitionKeyRanges, collectionUniqueId) + CollectionRoutingMap.CompleteRoutingMap(partitionKeyRanges, collectionUniqueId) self.assertRaises(ValueError, createRoutingMap) def test_incomplete_routing_map(self): - crm = _CollectionRoutingMap.CompleteRoutingMap( + crm = CollectionRoutingMap.CompleteRoutingMap( [ ({ 'id' : "2", 
'minInclusive' : "", 'maxExclusive' : "0000000030"}, 2), ({ 'id' : "3", 'minInclusive' : "0000000031", 'maxExclusive' : "FF"}, 2), @@ -195,7 +195,7 @@ def test_incomplete_routing_map(self): , "") self.assertIsNone(crm) - crm = _CollectionRoutingMap.CompleteRoutingMap( + crm = CollectionRoutingMap.CompleteRoutingMap( [ ({ 'id' : "2", 'minInclusive' : "", 'maxExclusive' : "0000000030"}, 2), ({ 'id' : "2", 'minInclusive' : "0000000030", 'maxExclusive' : "FF"}, 2), diff --git a/sdk/cosmos/azure-cosmos/test/routing/routing_map_provider_tests.py b/sdk/cosmos/azure-cosmos/test/routing/routing_map_provider_tests.py index 252c79e418d3..2601084c6fa4 100644 --- a/sdk/cosmos/azure-cosmos/test/routing/routing_map_provider_tests.py +++ b/sdk/cosmos/azure-cosmos/test/routing/routing_map_provider_tests.py @@ -21,9 +21,9 @@ import unittest import pytest -from azure.cosmos.routing.routing_map_provider import _SmartRoutingMapProvider -from azure.cosmos.routing.routing_map_provider import _CollectionRoutingMap -from azure.cosmos.routing import routing_range as routing_range +from azure.cosmos._routing.routing_map_provider import SmartRoutingMapProvider +from azure.cosmos._routing.routing_map_provider import CollectionRoutingMap +from azure.cosmos._routing import routing_range as routing_range pytestmark = pytest.mark.cosmosEmulator @@ -43,27 +43,27 @@ def setUp(self): self.smart_routing_map_provider = self.instantiate_smart_routing_map_provider(self.partition_key_ranges) partitionRangeWithInfo = map(lambda r: (r, True), self.partition_key_ranges) - self.cached_collection_routing_map = _CollectionRoutingMap.CompleteRoutingMap(partitionRangeWithInfo, 'sample collection id') + self.cached_collection_routing_map = CollectionRoutingMap.CompleteRoutingMap(partitionRangeWithInfo, 'sample collection id') def instantiate_smart_routing_map_provider(self, partition_key_ranges): client = RoutingMapProviderTests.MockedCosmosClientConnection(partition_key_ranges) - return 
_SmartRoutingMapProvider(client) + return SmartRoutingMapProvider(client) def test_full_range(self): # query range is the whole partition key range - pkRange = routing_range._Range("", "FF", True, False) + pkRange = routing_range.Range("", "FF", True, False) overlapping_partition_key_ranges = self.get_overlapping_ranges([pkRange]) self.assertEqual(len(overlapping_partition_key_ranges), len(self.partition_key_ranges)) self.assertEqual(overlapping_partition_key_ranges, self.partition_key_ranges) - pkRange = routing_range._Range("", "FF", False, False) + pkRange = routing_range.Range("", "FF", False, False) overlapping_partition_key_ranges = self.get_overlapping_ranges([pkRange]) self.assertEqual(overlapping_partition_key_ranges, self.partition_key_ranges) self.assertEqual(self.cached_collection_routing_map.get_overlapping_ranges([pkRange]), self.partition_key_ranges) def test_empty_ranges(self): # query range is the whole partition key range - pkRange = routing_range._Range("", "FF", True, False) + pkRange = routing_range.Range("", "FF", True, False) overlapping_partition_key_ranges = self.get_overlapping_ranges([pkRange]) self.assertEqual(len(overlapping_partition_key_ranges), len(self.partition_key_ranges)) self.assertEqual(overlapping_partition_key_ranges, self.partition_key_ranges) @@ -73,17 +73,17 @@ def test_empty_ranges(self): self.assertEqual(len(overlapping_partition_key_ranges), 0) # validate the overlaping partition key ranges results for empty ranges is empty - empty_start_range = routing_range._Range("", "", False, True) - empty_end_range = routing_range._Range("FF", "FF", False, True) - empty_range = routing_range._Range("AA", "AA", False, True) + empty_start_range = routing_range.Range("", "", False, True) + empty_end_range = routing_range.Range("FF", "FF", False, True) + empty_range = routing_range.Range("AA", "AA", False, True) self.validate_empty_query_ranges([empty_range], [empty_start_range], [empty_end_range], [empty_start_range, empty_range], 
[empty_start_range, empty_end_range], [empty_range, empty_end_range], [empty_range, empty_range, empty_end_range]) def test_bad_overlapping_query_ranges(self): # they share AA point - r1 = routing_range._Range("", "AA", True, True) - r2 = routing_range._Range("AA", "FF", True, False) + r1 = routing_range.Range("", "AA", True, True) + r2 = routing_range.Range("AA", "FF", True, False) def func_one_point_overlap(): self.smart_routing_map_provider.get_overlapping_ranges("sample collection id", [r1, r2]) @@ -91,16 +91,16 @@ def func_one_point_overlap(): self.assertRaises(ValueError, func_one_point_overlap) # overlapping range - r1 = routing_range._Range("", "AB", True, False) - r2 = routing_range._Range("AA", "FA", True, False) + r1 = routing_range.Range("", "AB", True, False) + r2 = routing_range.Range("AA", "FA", True, False) def func_overlap(): self.smart_routing_map_provider.get_overlapping_ranges("sample collection id", [r1, r2]) self.assertRaises(ValueError, func_overlap) - r1 = routing_range._Range("AB", "AC", True, False) - r1 = routing_range._Range("AA", "AB", True, False) + r1 = routing_range.Range("AB", "AC", True, False) + r1 = routing_range.Range("AA", "AB", True, False) def func_non_sorted(): self.smart_routing_map_provider.get_overlapping_ranges("sample collection id", [r1, r2]) @@ -108,31 +108,31 @@ def func_non_sorted(): self.assertRaises(ValueError, func_overlap) def test_empty_ranges_are_thrown_away(self): - e1 = routing_range._Range("", "", True, False) - r1 = routing_range._Range("", "AB", True, False) - e2 = routing_range._Range("AB", "AB", True, False) - r2 = routing_range._Range("AB", "AC", True, False) - e3 = routing_range._Range("AC", "AC", True, False) - e4 = routing_range._Range("AD", "AD", True, False) + e1 = routing_range.Range("", "", True, False) + r1 = routing_range.Range("", "AB", True, False) + e2 = routing_range.Range("AB", "AB", True, False) + r2 = routing_range.Range("AB", "AC", True, False) + e3 = routing_range.Range("AC", "AC", 
True, False) + e4 = routing_range.Range("AD", "AD", True, False) self.validate_overlapping_ranges_results([e1, r1, e2, r2, e3, e4], self.get_overlapping_ranges([r1, r2])) self.validate_against_cached_collection_results([e1, r1, e2, r2, e3, e4]) def test_simple(self): - r = routing_range._Range("AB", "AC", True, False) + r = routing_range.Range("AB", "AC", True, False) self.validate_against_cached_collection_results([r]) ranges = [ - routing_range._Range("0000000040", "0000000045", True, False), - routing_range._Range("0000000045", "0000000046", True, False), - routing_range._Range("0000000046", "0000000050", True, False) + routing_range.Range("0000000040", "0000000045", True, False), + routing_range.Range("0000000045", "0000000046", True, False), + routing_range.Range("0000000046", "0000000050", True, False) ] self.validate_against_cached_collection_results(ranges) def test_simple_boundary(self): ranges = [ - routing_range._Range("05C1C9CD673398", "05C1D9CD673398", True, False), + routing_range.Range("05C1C9CD673398", "05C1D9CD673398", True, False), ] self.validate_against_cached_collection_results(ranges) self.validate_overlapping_ranges_results(ranges, self.partition_key_ranges[1:2]) @@ -140,10 +140,10 @@ def test_simple_boundary(self): def test_two_adjacent_boundary(self): ranges = [ # self.partition_key_ranges[1] - routing_range._Range("05C1C9CD673398", "05C1D9CD673398", True, False), + routing_range.Range("05C1C9CD673398", "05C1D9CD673398", True, False), # self.partition_key_ranges[2] - routing_range._Range("05C1D9CD673398", "05C1D9CD673399", True, False), + routing_range.Range("05C1D9CD673398", "05C1D9CD673399", True, False), ] self.validate_against_cached_collection_results(ranges) self.validate_overlapping_ranges_results(ranges, self.partition_key_ranges[1:3]) @@ -151,8 +151,8 @@ def test_two_adjacent_boundary(self): def test_two_ranges_in_one_partition_key_range(self): # two ranges fall in the same partition key range ranges = [ - 
routing_range._Range("05C1C9CD673400", "05C1C9CD673401", True, False), - routing_range._Range("05C1C9CD673402", "05C1C9CD673403", True, False), + routing_range.Range("05C1C9CD673400", "05C1C9CD673401", True, False), + routing_range.Range("05C1C9CD673402", "05C1C9CD673403", True, False), ] self.validate_against_cached_collection_results(ranges) @@ -161,15 +161,15 @@ def test_two_ranges_in_one_partition_key_range(self): def test_complex(self): ranges = [ # all are covered by self.partition_key_ranges[1] - routing_range._Range("05C1C9CD673398", "05C1D9CD673391", True, False), - routing_range._Range("05C1D9CD673391", "05C1D9CD673392", True, False), - routing_range._Range("05C1D9CD673393", "05C1D9CD673395", True, False), - routing_range._Range("05C1D9CD673395", "05C1D9CD673395", True, False), + routing_range.Range("05C1C9CD673398", "05C1D9CD673391", True, False), + routing_range.Range("05C1D9CD673391", "05C1D9CD673392", True, False), + routing_range.Range("05C1D9CD673393", "05C1D9CD673395", True, False), + routing_range.Range("05C1D9CD673395", "05C1D9CD673395", True, False), # all are covered by self.partition_key_ranges[4]] - routing_range._Range("05C1E9CD673398", "05C1E9CD673401", True, False), - routing_range._Range("05C1E9CD673402", "05C1E9CD673403", True, False), + routing_range.Range("05C1E9CD673398", "05C1E9CD673401", True, False), + routing_range.Range("05C1E9CD673402", "05C1E9CD673403", True, False), # empty range - routing_range._Range("FF", "FF", True, False), + routing_range.Range("FF", "FF", True, False), ] self.validate_against_cached_collection_results(ranges) self.validate_overlapping_ranges_results(ranges, [self.partition_key_ranges[1], self.partition_key_ranges[4]]) diff --git a/sdk/cosmos/azure-cosmos/test/routing_map_tests.py b/sdk/cosmos/azure-cosmos/test/routing_map_tests.py index 48bbae7b173c..f2f396700be2 100644 --- a/sdk/cosmos/azure-cosmos/test/routing_map_tests.py +++ b/sdk/cosmos/azure-cosmos/test/routing_map_tests.py @@ -23,8 +23,8 @@ import 
pytest import azure.cosmos.documents as documents import azure.cosmos.cosmos_client as cosmos_client -from azure.cosmos.routing.routing_map_provider import _PartitionKeyRangeCache -from azure.cosmos.routing import routing_range as routing_range +from azure.cosmos._routing.routing_map_provider import PartitionKeyRangeCache +from azure.cosmos._routing import routing_range as routing_range import test_config pytestmark = pytest.mark.cosmosEmulator @@ -66,8 +66,8 @@ def test_read_partition_key_ranges(self): def test_routing_map_provider(self): partition_key_ranges = list(self.client.client_connection._ReadPartitionKeyRanges(self.collection_link)) - routing_mp = _PartitionKeyRangeCache(self.client.client_connection) - overlapping_partition_key_ranges = routing_mp.get_overlapping_ranges(self.collection_link, routing_range._Range("", "FF", True, False)) + routing_mp = PartitionKeyRangeCache(self.client.client_connection) + overlapping_partition_key_ranges = routing_mp.get_overlapping_ranges(self.collection_link, routing_range.Range("", "FF", True, False)) self.assertEqual(len(overlapping_partition_key_ranges), len(partition_key_ranges)) self.assertEqual(overlapping_partition_key_ranges, partition_key_ranges) diff --git a/sdk/cosmos/azure-cosmos/test/session_tests.py b/sdk/cosmos/azure-cosmos/test/session_tests.py index 03c28703615b..cdd0799dafd5 100644 --- a/sdk/cosmos/azure-cosmos/test/session_tests.py +++ b/sdk/cosmos/azure-cosmos/test/session_tests.py @@ -9,8 +9,8 @@ import test_config import azure.cosmos.errors as errors from azure.cosmos.http_constants import StatusCodes, SubStatusCodes, HttpHeaders -import azure.cosmos.synchronized_request as synchronized_request -import azure.cosmos.retry_utility as retry_utility +import azure.cosmos._synchronized_request as synchronized_request +from azure.cosmos import _retry_utility pytestmark = pytest.mark.cosmosEmulator @@ -62,8 +62,8 @@ def _MockExecuteFunctionSessionReadFailureOnce(self, function, *args, **kwargs): def 
test_clear_session_token(self): created_document = self.created_collection.create_item(body={'id': '1' + str(uuid.uuid4()), 'pk': 'mypk'}) - self.OriginalExecuteFunction = retry_utility._ExecuteFunction - retry_utility._ExecuteFunction = self._MockExecuteFunctionSessionReadFailureOnce + self.OriginalExecuteFunction = _retry_utility.ExecuteFunction + _retry_utility.ExecuteFunction = self._MockExecuteFunctionSessionReadFailureOnce try: self.created_collection.read_item(item=created_document['id'], partition_key='mypk') except errors.HTTPFailure as e: @@ -71,7 +71,7 @@ def test_clear_session_token(self): 'dbs/' + self.created_db.id + '/colls/' + self.created_collection.id), "") self.assertEqual(e.status_code, StatusCodes.NOT_FOUND) self.assertEqual(e.sub_status, SubStatusCodes.READ_SESSION_NOTAVAILABLE) - retry_utility._ExecuteFunction = self.OriginalExecuteFunction + _retry_utility.ExecuteFunction = self.OriginalExecuteFunction def _MockExecuteFunctionInvalidSessionToken(self, function, *args, **kwargs): response = {'_self':'dbs/90U1AA==/colls/90U1AJ4o6iA=/docs/90U1AJ4o6iABCT0AAAAABA==/', 'id':'1'} @@ -79,12 +79,12 @@ def _MockExecuteFunctionInvalidSessionToken(self, function, *args, **kwargs): return (response, headers) def test_internal_server_error_raised_for_invalid_session_token_received_from_server(self): - self.OriginalExecuteFunction = retry_utility._ExecuteFunction - retry_utility._ExecuteFunction = self._MockExecuteFunctionInvalidSessionToken + self.OriginalExecuteFunction = _retry_utility.ExecuteFunction + _retry_utility.ExecuteFunction = self._MockExecuteFunctionInvalidSessionToken try: self.created_collection.create_item(body={'id': '1' + str(uuid.uuid4()), 'pk': 'mypk'}) self.fail() except errors.HTTPFailure as e: self.assertEqual(e._http_error_message, "Could not parse the received session token: 2") self.assertEqual(e.status_code, StatusCodes.INTERNAL_SERVER_ERROR) - retry_utility._ExecuteFunction = self.OriginalExecuteFunction + 
_retry_utility.ExecuteFunction = self.OriginalExecuteFunction diff --git a/sdk/cosmos/azure-cosmos/test/session_token_unit_tests.py b/sdk/cosmos/azure-cosmos/test/session_token_unit_tests.py index ecdc2b1de238..76e9482ade4f 100644 --- a/sdk/cosmos/azure-cosmos/test/session_token_unit_tests.py +++ b/sdk/cosmos/azure-cosmos/test/session_token_unit_tests.py @@ -1,6 +1,6 @@ import unittest import pytest -from azure.cosmos.vector_session_token import VectorSessionToken +from azure.cosmos._vector_session_token import VectorSessionToken from azure.cosmos.errors import CosmosError pytestmark = pytest.mark.cosmosEmulator diff --git a/sdk/cosmos/azure-cosmos/test/streaming_failover_test.py b/sdk/cosmos/azure-cosmos/test/streaming_failover_test.py index 4a594fed9e96..73ab656627b2 100644 --- a/sdk/cosmos/azure-cosmos/test/streaming_failover_test.py +++ b/sdk/cosmos/azure-cosmos/test/streaming_failover_test.py @@ -1,13 +1,13 @@ import unittest -import azure.cosmos.cosmos_client_connection as cosmos_client_connection +import azure.cosmos._cosmos_client_connection as cosmos_client_connection import pytest import azure.cosmos.documents as documents import azure.cosmos.errors as errors from azure.cosmos.http_constants import HttpHeaders, StatusCodes, SubStatusCodes -import azure.cosmos.retry_utility as retry_utility -import azure.cosmos.endpoint_discovery_retry_policy as endpoint_discovery_retry_policy -from azure.cosmos.request_object import _RequestObject -import azure.cosmos.global_endpoint_manager as global_endpoint_manager +from azure.cosmos import _retry_utility +from azure.cosmos import _endpoint_discovery_retry_policy +from azure.cosmos._request_object import RequestObject +import azure.cosmos._global_endpoint_manager as global_endpoint_manager import azure.cosmos.http_constants as http_constants pytestmark = pytest.mark.cosmosEmulator @@ -30,8 +30,8 @@ class TestStreamingFailover(unittest.TestCase): endpoint_sequence = [] def test_streaming_failover(self): - 
self.OriginalExecuteFunction = retry_utility._ExecuteFunction - retry_utility._ExecuteFunction = self._MockExecuteFunctionEndpointDiscover + self.OriginalExecuteFunction = _retry_utility.ExecuteFunction + _retry_utility.ExecuteFunction = self._MockExecuteFunctionEndpointDiscover connection_policy = documents.ConnectionPolicy() connection_policy.PreferredLocations = self.preferred_regional_endpoints connection_policy.DisableSSLVerification = True @@ -62,7 +62,7 @@ def test_streaming_failover(self): self.assertEqual(self.endpoint_sequence[i], self.WRITE_ENDPOINT2) cosmos_client_connection.CosmosClientConnection.GetDatabaseAccount = self.original_get_database_account - retry_utility._ExecuteFunction = self.OriginalExecuteFunction + _retry_utility.ExecuteFunction = self.OriginalExecuteFunction def mock_get_database_account(self, url_connection = None): database_account = documents.DatabaseAccount() @@ -105,16 +105,16 @@ def test_retry_policy_does_not_mark_null_locations_unavailable(self): # these functions should not be called self._read_counter = 0 self._write_counter = 0 - request = _RequestObject(http_constants.ResourceType.Document, documents._OperationType.Read) - endpointDiscovery_retry_policy = endpoint_discovery_retry_policy._EndpointDiscoveryRetryPolicy(documents.ConnectionPolicy(), endpoint_manager, request) + request = RequestObject(http_constants.ResourceType.Document, documents._OperationType.Read) + endpointDiscovery_retry_policy = _endpoint_discovery_retry_policy.EndpointDiscoveryRetryPolicy(documents.ConnectionPolicy(), endpoint_manager, request) endpointDiscovery_retry_policy.ShouldRetry(errors.HTTPFailure(http_constants.StatusCodes.FORBIDDEN)) self.assertEqual(self._read_counter, 0) self.assertEqual(self._write_counter, 0) self._read_counter = 0 self._write_counter = 0 - request = _RequestObject(http_constants.ResourceType.Document, documents._OperationType.Create) - endpointDiscovery_retry_policy = 
endpoint_discovery_retry_policy._EndpointDiscoveryRetryPolicy(documents.ConnectionPolicy(), endpoint_manager, request) + request = RequestObject(http_constants.ResourceType.Document, documents._OperationType.Create) + endpointDiscovery_retry_policy = _endpoint_discovery_retry_policy.EndpointDiscoveryRetryPolicy(documents.ConnectionPolicy(), endpoint_manager, request) endpointDiscovery_retry_policy.ShouldRetry(errors.HTTPFailure(http_constants.StatusCodes.FORBIDDEN)) self.assertEqual(self._read_counter, 0) self.assertEqual(self._write_counter, 0) diff --git a/sdk/cosmos/azure-cosmos/test/utils_tests.py b/sdk/cosmos/azure-cosmos/test/utils_tests.py index 0dc7d7b8315a..4349502fe854 100644 --- a/sdk/cosmos/azure-cosmos/test/utils_tests.py +++ b/sdk/cosmos/azure-cosmos/test/utils_tests.py @@ -21,7 +21,7 @@ import unittest import pytest -import azure.cosmos.utils as utils +import azure.cosmos._utils as _utils import platform import azure.cosmos.http_constants as http_constants @@ -33,7 +33,7 @@ class UtilsTests(unittest.TestCase): """ def test_user_agent(self): - user_agent = utils._get_user_agent() + user_agent = _utils.get_user_agent() expected_user_agent = "{}/{} Python/{} azure-cosmos/{}".format( platform.system(), platform.release(), platform.python_version(),