[Test] Updates to devtools utils for storage/tables (#19040)
seankane-msft authored Jun 29, 2021
1 parent ab33d61 commit 20e5bdc
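In short: the locally maintained test utilities (StorageTestCase, FakeTokenCredential, XMSRequestIDBody, RetryCounter, ResponseCallback, and related helpers) are deleted from each package's _shared/testcase.py, and the storage/tables tests now import the shared implementations from devtools_testutils.storage. The following is a minimal sketch of the resulting import pattern; the imports mirror the diff below, while the test class, method name, and preparer-injected parameter names are illustrative only.

# Sketch of the post-change import pattern (ExampleAppendBlobTest/test_example are hypothetical names).
from devtools_testutils.storage import StorageTestCase        # shared sync base class
from _shared.testcase import GlobalStorageAccountPreparer     # resource preparers stay in the local _shared helpers
# async tests use: from devtools_testutils.storage.aio import AsyncStorageTestCase


class ExampleAppendBlobTest(StorageTestCase):
    @GlobalStorageAccountPreparer()
    def test_example(self, resource_group, location, storage_account, storage_account_key):
        # Helpers such as account_url() now come from the shared StorageTestCase.
        account_url = self.account_url(storage_account, "blob")
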
Showing 66 changed files with 448 additions and 424 deletions.
@@ -303,33 +303,6 @@ def skip_test_if_targeting_emulator(self):
    return skip_test_if_targeting_emulator


class RetryCounter(object):
    def __init__(self):
        self.count = 0

    def simple_count(self, retry_context):
        self.count += 1


class ResponseCallback(object):
    def __init__(self, status=None, new_status=None):
        self.status = status
        self.new_status = new_status
        self.first = True
        self.count = 0

    def override_first_status(self, response):
        if self.first and response.http_response.status_code == self.status:
            response.http_response.status_code = self.new_status
            self.first = False
        self.count += 1

    def override_status(self, response):
        if response.http_response.status_code == self.status:
            response.http_response.status_code = self.new_status
        self.count += 1


class LogCaptured(object):
    def __init__(self, test_case=None):
        # accept the test case so that we may reset logging after capturing logs
267 changes: 1 addition & 266 deletions sdk/storage/azure-storage-blob/tests/_shared/testcase.py
@@ -7,19 +7,7 @@
from __future__ import division
import os.path
import time
from datetime import datetime, timedelta

try:
    import unittest.mock as mock
except ImportError:
    import mock

import zlib
import math
import sys
import os
import random
import re
import logging
from devtools_testutils import (
    AzureMgmtTestCase,
@@ -28,16 +16,12 @@
    StorageAccountPreparer,
    FakeResource,
)
from azure_devtools.scenario_tests import RecordingProcessor, AzureTestError, create_random_name
try:
    from cStringIO import StringIO # Python 2
except ImportError:
    from io import StringIO

from azure.core.pipeline.policies import SansIOHTTPPolicy
from azure.core.exceptions import ResourceNotFoundError, HttpResponseError
from azure.core.credentials import AccessToken
from azure.storage.blob import generate_account_sas, AccountSasPermissions, ResourceTypes
from azure.mgmt.storage.models import StorageAccount, Endpoints
try:
    # Running locally - use configuration in settings_real.py
@@ -46,15 +30,9 @@
    # Running on the pipeline - use fake values in order to create rg, etc.
    from .settings_fake import *

try:
    from devtools_testutils import mgmt_settings_real as settings
except ImportError:
    from devtools_testutils import mgmt_settings_fake as settings

from .service_versions import service_version_map

import pytest

from devtools_testutils.storage import StorageTestCase

LOGGING_FORMAT = '%(asctime)s %(name)-20s %(levelname)-5s %(message)s'
os.environ['AZURE_STORAGE_ACCOUNT_NAME'] = STORAGE_ACCOUNT_NAME
@@ -63,35 +41,6 @@
os.environ['AZURE_SKIP_LIVE_RECORDING'] = os.environ.get('AZURE_SKIP_LIVE_RECORDING', None) or SKIP_LIVE_RECORDING


class FakeTokenCredential(object):
    """Protocol for classes able to provide OAuth tokens.
    :param str scopes: Lets you specify the type of access needed.
    """
    def __init__(self):
        self.token = AccessToken("YOU SHALL NOT PASS", 0)
        self.get_token_count = 0

    def get_token(self, *args):
        self.get_token_count += 1
        return self.token


class XMSRequestIDBody(RecordingProcessor):
    """This process is used for Storage batch call only, to avoid the echo policy.
    """
    def process_response(self, response):
        content_type = None
        for key, value in response.get('headers', {}).items():
            if key.lower() == 'content-type':
                content_type = (value[0] if isinstance(value, list) else value).lower()
                break

        if content_type and 'multipart/mixed' in content_type:
            response['body']['string'] = re.sub(b"x-ms-client-request-id: [a-f0-9-]+\r\n", b"", response['body']['string'])

        return response


class GlobalStorageAccountPreparer(AzureMgmtPreparer):
    def __init__(self):
        super(GlobalStorageAccountPreparer, self).__init__(
@@ -153,226 +102,12 @@ def create_resource(self, name, **kwargs):
        }


class StorageTestCase(AzureMgmtTestCase):

    def __init__(self, *args, **kwargs):
        super(StorageTestCase, self).__init__(*args, **kwargs)
        self.replay_processors.append(XMSRequestIDBody())
        self.logger = logging.getLogger('azure.storage')
        self.configure_logging()

    def connection_string(self, account, key):
        return "DefaultEndpointsProtocol=https;AcCounTName=" + account.name + ";AccOuntKey=" + str(key) + ";EndpoIntSuffix=core.windows.net"

    def account_url(self, storage_account, storage_type):
        """Return an url of storage account.
        :param str storage_account: Storage account name
        :param str storage_type: The Storage type part of the URL. Should be "blob", or "queue", etc.
        """
        try:
            if storage_type == "blob":
                return storage_account.primary_endpoints.blob.rstrip("/")
            if storage_type == "queue":
                return storage_account.primary_endpoints.queue.rstrip("/")
            if storage_type == "file":
                return storage_account.primary_endpoints.file.rstrip("/")
            else:
                raise ValueError("Unknown storage type {}".format(storage_type))
        except AttributeError: # Didn't find "primary_endpoints"
            return 'https://{}.{}.core.windows.net'.format(storage_account, storage_type)

    def configure_logging(self):
        enable_logging = ENABLE_LOGGING

        self.enable_logging() if enable_logging else self.disable_logging()

    def enable_logging(self):
        handler = logging.StreamHandler()
        handler.setFormatter(logging.Formatter(LOGGING_FORMAT))
        self.logger.handlers = [handler]
        self.logger.setLevel(logging.DEBUG)
        self.logger.propagate = True
        self.logger.disabled = False

    def disable_logging(self):
        self.logger.propagate = False
        self.logger.disabled = True
        self.logger.handlers = []

    def sleep(self, seconds):
        if self.is_live:
            time.sleep(seconds)

    def get_random_bytes(self, size):
        # recordings don't like random stuff. making this more
        # deterministic.
        return b'a'*size

    def get_random_text_data(self, size):
        '''Returns random unicode text data exceeding the size threshold for
        chunking blob upload.'''
        checksum = zlib.adler32(self.qualified_test_name.encode()) & 0xffffffff
        rand = random.Random(checksum)
        text = u''
        words = [u'hello', u'world', u'python', u'啊齄丂狛狜']
        while (len(text) < size):
            index = int(rand.random()*(len(words) - 1))
            text = text + u' ' + words[index]

        return text

    @staticmethod
    def _set_test_proxy(service, settings):
        if settings.USE_PROXY:
            service.set_proxy(
                settings.PROXY_HOST,
                settings.PROXY_PORT,
                settings.PROXY_USER,
                settings.PROXY_PASSWORD,
            )

    def assertNamedItemInContainer(self, container, item_name, msg=None):
        def _is_string(obj):
            if sys.version_info >= (3,):
                return isinstance(obj, str)
            else:
                return isinstance(obj, basestring)
        for item in container:
            if _is_string(item):
                if item == item_name:
                    return
            elif isinstance(item, dict):
                if item_name == item['name']:
                    return
            elif item.name == item_name:
                return
            elif hasattr(item, 'snapshot') and item.snapshot == item_name:
                return


        standardMsg = '{0} not found in {1}'.format(
            repr(item_name), [str(c) for c in container])
        self.fail(self._formatMessage(msg, standardMsg))

    def assertNamedItemNotInContainer(self, container, item_name, msg=None):
        for item in container:
            if item.name == item_name:
                standardMsg = '{0} unexpectedly found in {1}'.format(
                    repr(item_name), repr(container))
                self.fail(self._formatMessage(msg, standardMsg))

    def assert_upload_progress(self, size, max_chunk_size, progress, unknown_size=False):
        '''Validates that the progress chunks align with our chunking procedure.'''
        index = 0
        total = None if unknown_size else size
        small_chunk_size = size % max_chunk_size
        self.assertEqual(len(progress), math.ceil(size / max_chunk_size))
        for i in progress:
            self.assertTrue(i[0] % max_chunk_size == 0 or i[0] % max_chunk_size == small_chunk_size)
            self.assertEqual(i[1], total)

    def assert_download_progress(self, size, max_chunk_size, max_get_size, progress):
        '''Validates that the progress chunks align with our chunking procedure.'''
        if size <= max_get_size:
            self.assertEqual(len(progress), 1)
            self.assertTrue(progress[0][0], size)
            self.assertTrue(progress[0][1], size)
        else:
            small_chunk_size = (size - max_get_size) % max_chunk_size
            self.assertEqual(len(progress), 1 + math.ceil((size - max_get_size) / max_chunk_size))

            self.assertTrue(progress[0][0], max_get_size)
            self.assertTrue(progress[0][1], size)
            for i in progress[1:]:
                self.assertTrue(i[0] % max_chunk_size == 0 or i[0] % max_chunk_size == small_chunk_size)
                self.assertEqual(i[1], size)

    def generate_oauth_token(self):
        if self.is_live:
            from azure.identity import ClientSecretCredential
            return ClientSecretCredential(
                self.get_settings_value("TENANT_ID"),
                self.get_settings_value("CLIENT_ID"),
                self.get_settings_value("CLIENT_SECRET"),
            )
        return self.generate_fake_token()

    def generate_sas_token(self):
        fake_key = 'a'*30 + 'b'*30

        return '?' + generate_account_sas(
            account_name = 'test', # name of the storage account
            account_key = fake_key, # key for the storage account
            resource_types = ResourceTypes(object=True),
            permission = AccountSasPermissions(read=True,list=True),
            start = datetime.now() - timedelta(hours = 24),
            expiry = datetime.now() + timedelta(days = 8)
        )

    def generate_fake_token(self):
        return FakeTokenCredential()

    def _get_service_version(self, **kwargs):
        env_version = service_version_map.get(os.environ.get("AZURE_LIVE_TEST_SERVICE_VERSION","LATEST"))
        return kwargs.pop("service_version", env_version)

    def create_storage_client(self, client, *args, **kwargs):
        kwargs["api_version"] = self._get_service_version(**kwargs)
        kwargs["_additional_pipeline_policies"] = [ApiVersionAssertPolicy(kwargs["api_version"])]
        return client(*args, **kwargs)

    def create_storage_client_from_conn_str(self, client, *args, **kwargs):
        kwargs["api_version"] = self._get_service_version(**kwargs)
        kwargs["_additional_pipeline_policies"] = [ApiVersionAssertPolicy(kwargs["api_version"])]
        return client.from_connection_string(*args, **kwargs)


class ApiVersionAssertPolicy(SansIOHTTPPolicy):
    """
    Assert the ApiVersion is set properly on the response
    """

    def __init__(self, api_version):
        self.api_version = api_version

    def on_request(self, request):
        assert request.http_request.headers['x-ms-version'] == self.api_version


def not_for_emulator(test):
    def skip_test_if_targeting_emulator(self):
        test(self)
    return skip_test_if_targeting_emulator


class RetryCounter(object):
    def __init__(self):
        self.count = 0

    def simple_count(self, retry_context):
        self.count += 1


class ResponseCallback(object):
    def __init__(self, status=None, new_status=None):
        self.status = status
        self.new_status = new_status
        self.first = True
        self.count = 0

    def override_first_status(self, response):
        if self.first and response.http_response.status_code == self.status:
            response.http_response.status_code = self.new_status
            self.first = False
        self.count += 1

    def override_status(self, response):
        if response.http_response.status_code == self.status:
            response.http_response.status_code = self.new_status
        self.count += 1


class LogCaptured(object):
    def __init__(self, test_case=None):
        # accept the test case so that we may reset logging after capturing logs
4 changes: 2 additions & 2 deletions sdk/storage/azure-storage-blob/tests/test_append_blob.py
@@ -26,8 +26,8 @@
    BlobSasPermissions)
from azure.storage.blob._shared.policies import StorageContentValidation

from _shared.testcase import StorageTestCase, GlobalStorageAccountPreparer, StorageAccountPreparer, \
    GlobalResourceGroupPreparer
from _shared.testcase import GlobalStorageAccountPreparer, StorageAccountPreparer, GlobalResourceGroupPreparer
from devtools_testutils.storage import StorageTestCase

# ------------------------------------------------------------------------------
TEST_BLOB_PREFIX = 'blob'
@@ -31,7 +31,7 @@
    BlobClient,
)
from _shared.testcase import GlobalStorageAccountPreparer, GlobalResourceGroupPreparer, StorageAccountPreparer
from _shared.asynctestcase import AsyncStorageTestCase
from devtools_testutils.storage.aio import AsyncStorageTestCase

# ------------------------------------------------------------------------------
TEST_BLOB_PREFIX = 'blob'
@@ -29,7 +29,8 @@
    ResourceTypes,
    AccountSasPermissions, generate_container_sas, ContainerClient, CustomerProvidedEncryptionKey,
)
from _shared.testcase import StorageTestCase, GlobalStorageAccountPreparer
from _shared.testcase import GlobalStorageAccountPreparer
from devtools_testutils.storage import StorageTestCase

# ------------------------------------------------------------------------------
LARGE_APPEND_BLOB_SIZE = 64 * 1024
@@ -29,7 +29,7 @@
    generate_container_sas, CustomerProvidedEncryptionKey,
)
from _shared.testcase import GlobalStorageAccountPreparer
from _shared.asynctestcase import AsyncStorageTestCase
from devtools_testutils.storage.aio import AsyncStorageTestCase

from azure.storage.blob.aio import (
    BlobServiceClient,