From a25f7b08bb788ed4c4921d1a40d69ab8de5cf826 Mon Sep 17 00:00:00 2001 From: Lauren Yu <6631887+laurenyu@users.noreply.github.com> Date: Thu, 27 Jun 2019 14:17:45 -0700 Subject: [PATCH] change: add release buildspec (#30) --- CHANGELOG.md | 5 + MANIFEST.in | 10 ++ VERSION | 1 + buildspec-release.yml | 123 ++++++++++++++++++ docker/1.4.0/final/Dockerfile.cpu | 6 +- docker/1.4.0/final/Dockerfile.eia | 6 +- docker/1.4.0/final/Dockerfile.gpu | 6 +- scripts/build_all.py | 62 +++++++++ scripts/publish_all.py | 53 ++++++++ setup.py | 4 +- .../local/test_default_model_fn.py | 2 +- test/integration/local/test_gluon_hosting.py | 2 +- test/integration/local/test_hosting.py | 12 +- test/integration/local/test_onnx.py | 2 +- tox.ini | 6 + 15 files changed, 282 insertions(+), 18 deletions(-) create mode 100644 CHANGELOG.md create mode 100644 MANIFEST.in create mode 100644 VERSION create mode 100644 buildspec-release.yml create mode 100644 scripts/build_all.py create mode 100644 scripts/publish_all.py diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..1acc92f --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,5 @@ +# Changelog + +## v1.0.0 + +Initial commit diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000..a357aaa --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,10 @@ +recursive-include src/sagemaker_mxnet_serving_container * + +include VERSION +include LICENSE +include README.rst + +prune test + +recursive-exclude * __pycache__ +recursive-exclude * *.py[co] diff --git a/VERSION b/VERSION new file mode 100644 index 0000000..e752649 --- /dev/null +++ b/VERSION @@ -0,0 +1 @@ +1.0.1.dev0 diff --git a/buildspec-release.yml b/buildspec-release.yml new file mode 100644 index 0000000..4c94b1d --- /dev/null +++ b/buildspec-release.yml @@ -0,0 +1,123 @@ +version: 0.2 + +env: + variables: + FRAMEWORK_FULL_VERSION: '1.4.0' + FRAMEWORK_SHORT_VERSION: '1.4' + AWS_DEFAULT_REGION: 'us-west-2' + ECR_REPO: 'sagemaker-mxnet-serving' + GITHUB_REPO: 
'sagemaker-mxnet-serving-container' + EI_ACCELERATOR_TYPE: 'ml.eia1.medium' + GPU_INSTANCE_TYPE: 'p2.xlarge' + SETUP_FILE: 'setup_cmds.sh' + SETUP_CMDS: '#!/bin/bash\npip install --upgrade pip\npip install -U -e .\npip install -U -e .[test]' + +phases: + pre_build: + commands: + - start-dockerd + + - ACCOUNT=$(aws sts get-caller-identity --query 'Account' --output text) + + build: + commands: + # prepare the release (update versions, changelog etc.) + - git-release --prepare --min-version 1.0.1 + + # run linter + - tox -e flake8 + + # run unit tests + - tox -e py27,py36 test/unit + + # create pip archive (tar name will be something like sagemaker_mxnet_serving_container-1.0.0.tar.gz, but the Dockerfiles expect sagemaker_mxnet_serving_container.tar.gz) + - python3 setup.py sdist + - mv dist/sagemaker_mxnet_serving_container-*.tar.gz dist/sagemaker_mxnet_serving_container.tar.gz + + # build images + - python3 scripts/build_all.py --version $FRAMEWORK_FULL_VERSION --account $ACCOUNT --repo $ECR_REPO + + # run cpu integration tests + - IGNORE_COVERAGE=- tox -e py36 -- test/integration/local --py-version 3 --processor cpu --framework-version $FRAMEWORK_FULL_VERSION --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO + - IGNORE_COVERAGE=- tox -e py36 -- test/integration/local --py-version 2 --processor cpu --framework-version $FRAMEWORK_FULL_VERSION --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO + + # push docker images to ECR + - python3 scripts/publish_all.py --version $FRAMEWORK_FULL_VERSION --account $ACCOUNT --repo $ECR_REPO + + # launch remote gpu instance + - create-key-pair + - launch-ec2-instance --instance-type $GPU_INSTANCE_TYPE --ami-name dlami-ubuntu + + # run gpu integration tests + - printf "$SETUP_CMDS" > $SETUP_FILE + - ecr_image="$ACCOUNT.dkr.ecr.$AWS_DEFAULT_REGION.amazonaws.com/$ECR_REPO" + - cmd="IGNORE_COVERAGE=- tox -e py36 -- test/integration/local --processor gpu --framework-version $FRAMEWORK_FULL_VERSION --region 
$AWS_DEFAULT_REGION --docker-base-name $ecr_image" + - remote-test --test-cmd "$cmd --py-version 3" --github-repo $GITHUB_REPO --branch master --setup-file $SETUP_FILE + - remote-test --test-cmd "$cmd --py-version 2" --github-repo $GITHUB_REPO --branch master --skip-setup + + # run sagemaker integration tests + - IGNORE_COVERAGE=- tox -e py36 -- test/integration/sagemaker -n 4 --py-version 3 --processor cpu --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO --aws-id $ACCOUNT --framework-version $FRAMEWORK_FULL_VERSION + - IGNORE_COVERAGE=- tox -e py36 -- test/integration/sagemaker -n 4 --py-version 3 --processor gpu --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO --aws-id $ACCOUNT --framework-version $FRAMEWORK_FULL_VERSION + - IGNORE_COVERAGE=- tox -e py36 -- test/integration/sagemaker -n 4 --py-version 2 --processor cpu --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO --aws-id $ACCOUNT --framework-version $FRAMEWORK_FULL_VERSION + - IGNORE_COVERAGE=- tox -e py36 -- test/integration/sagemaker -n 4 --py-version 2 --processor gpu --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO --aws-id $ACCOUNT --framework-version $FRAMEWORK_FULL_VERSION + + # run ei tests + - IGNORE_COVERAGE=- tox -e py36 -- test/integration/sagemaker/test_elastic_inference.py --py-version 3 --processor cpu --accelerator-type $EI_ACCELERATOR_TYPE --region $AWS_DEFAULT_REGION --docker-base-name "$ECR_REPO-eia" --aws-id $ACCOUNT --framework-version $FRAMEWORK_FULL_VERSION + - IGNORE_COVERAGE=- tox -e py36 -- test/integration/sagemaker/test_elastic_inference.py --py-version 2 --processor cpu --accelerator-type $EI_ACCELERATOR_TYPE --region $AWS_DEFAULT_REGION --docker-base-name "$ECR_REPO-eia" --aws-id $ACCOUNT --framework-version $FRAMEWORK_FULL_VERSION + + # write deployment details to file + - | + echo '[{ + "repository": "'$ECR_REPO'", + "tags": [{ + "source": "'$FRAMEWORK_FULL_VERSION'-cpu-py2", + "dest": ["'$FRAMEWORK_FULL_VERSION'-cpu-py2", 
"'$FRAMEWORK_SHORT_VERSION'-cpu-py2", "'$FRAMEWORK_FULL_VERSION'-cpu-py2-'${CODEBUILD_BUILD_ID#*:}'"] + },{ + "source": "'$FRAMEWORK_FULL_VERSION'-gpu-py2", + "dest": ["'$FRAMEWORK_FULL_VERSION'-gpu-py2", "'$FRAMEWORK_SHORT_VERSION'-gpu-py2", "'$FRAMEWORK_FULL_VERSION'-gpu-py2-'${CODEBUILD_BUILD_ID#*:}'"] + },{ + "source": "'$FRAMEWORK_FULL_VERSION'-cpu-py3", + "dest": ["'$FRAMEWORK_FULL_VERSION'-cpu-py3", "'$FRAMEWORK_SHORT_VERSION'-cpu-py3", "'$FRAMEWORK_FULL_VERSION'-cpu-py3-'${CODEBUILD_BUILD_ID#*:}'"] + },{ + "source": "'$FRAMEWORK_FULL_VERSION'-gpu-py3", + "dest": ["'$FRAMEWORK_FULL_VERSION'-gpu-py3", "'$FRAMEWORK_SHORT_VERSION'-gpu-py3", "'$FRAMEWORK_FULL_VERSION'-gpu-py3-'${CODEBUILD_BUILD_ID#*:}'"] + }], + "test": [ + "IGNORE_COVERAGE=- tox -e py36 -- test/integration/sagemaker/test_hosting.py --py-version 3 --processor cpu --region {region} --docker-base-name '$ECR_REPO' --aws-id '$ACCOUNT' --framework-version '$FRAMEWORK_FULL_VERSION'", + "IGNORE_COVERAGE=- tox -e py36 -- test/integration/sagemaker/test_hosting.py --py-version 3 --processor gpu --region {region} --docker-base-name '$ECR_REPO' --aws-id '$ACCOUNT' --framework-version '$FRAMEWORK_FULL_VERSION'", + "IGNORE_COVERAGE=- tox -e py36 -- test/integration/sagemaker/test_hosting.py --py-version 2 --processor cpu --region {region} --docker-base-name '$ECR_REPO' --aws-id '$ACCOUNT' --framework-version '$FRAMEWORK_FULL_VERSION'", + "IGNORE_COVERAGE=- tox -e py36 -- test/integration/sagemaker/test_hosting.py --py-version 2 --processor gpu --region {region} --docker-base-name '$ECR_REPO' --aws-id '$ACCOUNT' --framework-version '$FRAMEWORK_FULL_VERSION'" + ] + }, { + "repository": "'$ECR_REPO'-eia", + "tags": [{ + "source": "'$FRAMEWORK_FULL_VERSION'-cpu-py2", + "dest": ["'$FRAMEWORK_FULL_VERSION'-cpu-py2", "'$FRAMEWORK_SHORT_VERSION'-cpu-py2", "'$FRAMEWORK_FULL_VERSION'-cpu-py2-'${CODEBUILD_BUILD_ID#*:}'"] + },{ + "source": "'$FRAMEWORK_FULL_VERSION'-gpu-py2", + "dest": 
["'$FRAMEWORK_FULL_VERSION'-gpu-py2", "'$FRAMEWORK_SHORT_VERSION'-gpu-py2", "'$FRAMEWORK_FULL_VERSION'-gpu-py2-'${CODEBUILD_BUILD_ID#*:}'"] + },{ + "source": "'$FRAMEWORK_FULL_VERSION'-cpu-py3", + "dest": ["'$FRAMEWORK_FULL_VERSION'-cpu-py3", "'$FRAMEWORK_SHORT_VERSION'-cpu-py3", "'$FRAMEWORK_FULL_VERSION'-cpu-py3-'${CODEBUILD_BUILD_ID#*:}'"] + },{ + "source": "'$FRAMEWORK_FULL_VERSION'-gpu-py3", + "dest": ["'$FRAMEWORK_FULL_VERSION'-gpu-py3", "'$FRAMEWORK_SHORT_VERSION'-gpu-py3", "'$FRAMEWORK_FULL_VERSION'-gpu-py3-'${CODEBUILD_BUILD_ID#*:}'"] + }], + "test": [ + "IGNORE_COVERAGE=- tox -e py36 -- test/integration/sagemaker/test_elastic_inference.py --py-version 3 --processor cpu --accelerator-type '$EI_ACCELERATOR_TYPE' --region {region} --docker-base-name '$ECR_REPO'-eia --aws-id '$ACCOUNT' --framework-version '$FRAMEWORK_FULL_VERSION'", + "IGNORE_COVERAGE=- tox -e py36 -- test/integration/sagemaker/test_elastic_inference.py --py-version 2 --processor cpu --accelerator-type '$EI_ACCELERATOR_TYPE' --region {region} --docker-base-name '$ECR_REPO'-eia --aws-id '$ACCOUNT' --framework-version '$FRAMEWORK_FULL_VERSION'" + ] + }]' > deployments.json + + # publish the release to github + - git-release --publish + + finally: + # shut down remote gpu instance + - cleanup-gpu-instances + - cleanup-key-pairs + +artifacts: + files: + - deployments.json +name: ARTIFACT_1 diff --git a/docker/1.4.0/final/Dockerfile.cpu b/docker/1.4.0/final/Dockerfile.cpu index 61fd084..d67fc6c 100644 --- a/docker/1.4.0/final/Dockerfile.cpu +++ b/docker/1.4.0/final/Dockerfile.cpu @@ -12,13 +12,13 @@ RUN apt-get update && \ WORKDIR / -COPY dist/sagemaker_mxnet_serving_container-1.0.0.tar.gz /sagemaker_mxnet_serving_container-1.0.0.tar.gz +COPY dist/sagemaker_mxnet_serving_container.tar.gz /sagemaker_mxnet_serving_container.tar.gz RUN pip install --no-cache mxnet-mkl==1.4.0 \ keras-mxnet==2.2.4.1 \ onnx==1.4.1 \ - /sagemaker_mxnet_serving_container-1.0.0.tar.gz && \ - rm 
/sagemaker_mxnet_serving_container-1.0.0.tar.gz + /sagemaker_mxnet_serving_container.tar.gz && \ + rm /sagemaker_mxnet_serving_container.tar.gz # This is here to make our installed version of OpenCV work. # https://stackoverflow.com/questions/29274638/opencv-libdc1394-error-failed-to-initialize-libdc1394 diff --git a/docker/1.4.0/final/Dockerfile.eia b/docker/1.4.0/final/Dockerfile.eia index f189f54..e2b6aeb 100644 --- a/docker/1.4.0/final/Dockerfile.eia +++ b/docker/1.4.0/final/Dockerfile.eia @@ -12,13 +12,13 @@ RUN apt-get update && \ WORKDIR / -COPY dist/sagemaker_mxnet_serving_container-1.0.0.tar.gz /sagemaker_mxnet_serving_container-1.0.0.tar.gz +COPY dist/sagemaker_mxnet_serving_container.tar.gz /sagemaker_mxnet_serving_container.tar.gz RUN pip install --no-cache https://s3.amazonaws.com/amazonei-apachemxnet/amazonei_mxnet-1.4.0-py2.py3-none-manylinux1_x86_64.whl \ keras-mxnet==2.2.4.1 \ onnx==1.4.1 \ - /sagemaker_mxnet_serving_container-1.0.0.tar.gz && \ - rm /sagemaker_mxnet_serving_container-1.0.0.tar.gz + /sagemaker_mxnet_serving_container.tar.gz && \ + rm /sagemaker_mxnet_serving_container.tar.gz # This is here to make our installed version of OpenCV work. 
# https://stackoverflow.com/questions/29274638/opencv-libdc1394-error-failed-to-initialize-libdc1394 diff --git a/docker/1.4.0/final/Dockerfile.gpu b/docker/1.4.0/final/Dockerfile.gpu index 948dfc4..e86e1ce 100644 --- a/docker/1.4.0/final/Dockerfile.gpu +++ b/docker/1.4.0/final/Dockerfile.gpu @@ -12,13 +12,13 @@ RUN apt-get update && \ WORKDIR / -COPY dist/sagemaker_mxnet_serving_container-1.0.0.tar.gz /sagemaker_mxnet_serving_container-1.0.0.tar.gz +COPY dist/sagemaker_mxnet_serving_container.tar.gz /sagemaker_mxnet_serving_container.tar.gz RUN pip install --no-cache mxnet-cu92mkl==1.4.0 \ keras-mxnet==2.2.4.1 \ onnx==1.4.1 \ - /sagemaker_mxnet_serving_container-1.0.0.tar.gz && \ - rm /sagemaker_mxnet_serving_container-1.0.0.tar.gz + /sagemaker_mxnet_serving_container.tar.gz && \ + rm /sagemaker_mxnet_serving_container.tar.gz # This is here to make our installed version of OpenCV work. # https://stackoverflow.com/questions/29274638/opencv-libdc1394-error-failed-to-initialize-libdc1394 diff --git a/scripts/build_all.py b/scripts/build_all.py new file mode 100644 index 0000000..7f4de99 --- /dev/null +++ b/scripts/build_all.py @@ -0,0 +1,62 @@ +# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+from __future__ import absolute_import + +import argparse +import os +import subprocess + +DEFAULT_REGION = 'us-west-2' + + +def _parse_args(): + parser = argparse.ArgumentParser() + + parser.add_argument('--account') + parser.add_argument('--version') + parser.add_argument('--repo') + parser.add_argument('--region', default=DEFAULT_REGION) + + return parser.parse_args() + + +args = _parse_args() + +build_dir = os.path.join('docker', args.version, 'final') + +# Run docker-login so we can pull the cached image +get_login_cmd = 'aws ecr get-login --no-include-email --region {} --registry-id {}'.format(args.region, args.account) +login_cmd = subprocess.check_output(get_login_cmd.split()) +print('Executing docker login command: {}'.format(login_cmd)) +subprocess.check_call(login_cmd.split()) + +for arch in ['cpu', 'gpu', 'eia']: + for py_version in ['2.7', '3.6']: + tag_arch = 'cpu' if arch == 'eia' else arch + tag = '{}-{}-py{}'.format(args.version, tag_arch, py_version[0]) + repo = '{}-eia'.format(args.repo) if arch == 'eia' else args.repo + dest = '{}:{}'.format(repo, tag) + + prev_image_uri = '{}.dkr.ecr.{}.amazonaws.com/{}'.format(args.account, args.region, dest) + dockerfile = os.path.join(build_dir, 'Dockerfile.{}'.format(arch)) + + build_cmd = [ + 'docker', 'build', + '-f', dockerfile, + '--cache-from', prev_image_uri, + '--build-arg', 'py_version={}'.format(py_version), + '-t', dest, + '.', + ] + print('Building docker image: {}'.format(' '.join(build_cmd))) + subprocess.check_call(build_cmd) diff --git a/scripts/publish_all.py b/scripts/publish_all.py new file mode 100644 index 0000000..3c033e6 --- /dev/null +++ b/scripts/publish_all.py @@ -0,0 +1,53 @@ +# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License.
A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +from __future__ import absolute_import + +import argparse +import subprocess + +DEFAULT_REGION = 'us-west-2' + + +def _parse_args(): + parser = argparse.ArgumentParser() + + parser.add_argument('--account') + parser.add_argument('--version') + parser.add_argument('--repo') + parser.add_argument('--region', default=DEFAULT_REGION) + + return parser.parse_args() + + +args = _parse_args() + +for arch in ['cpu', 'gpu', 'eia']: + for py_version in ['2', '3']: + repo = '{}-eia'.format(args.repo) if arch == 'eia' else args.repo + tag_arch = 'cpu' if arch == 'eia' else arch + source = '{}:{}-{}-py{}'.format(repo, args.version, tag_arch, py_version) + dest = '{}.dkr.ecr.{}.amazonaws.com/{}'.format(args.account, args.region, source) + + tag_cmd = 'docker tag {} {}'.format(source, dest) + print('Tagging image: {}'.format(tag_cmd)) + subprocess.check_call(tag_cmd.split()) + + login_cmd = subprocess.check_output( + 'aws ecr get-login --no-include-email --registry-id {} --region {}' + .format(args.account, args.region).split()) + print('Executing docker login command: {}'.format(login_cmd)) + subprocess.check_call(login_cmd.split()) + + push_cmd = 'docker push {}'.format(dest) + print('Pushing image: {}'.format(push_cmd)) + subprocess.check_call(push_cmd.split()) diff --git a/setup.py b/setup.py index 9ca7391..af3dda8 100644 --- a/setup.py +++ b/setup.py @@ -24,7 +24,7 @@ def read(fname): setup( name='sagemaker_mxnet_serving_container', - version='1.0.0', + version=read('VERSION').strip(), description='Open source library for creating MXNet containers for serving on SageMaker.', packages=find_packages(where='src', 
exclude=('test',)), @@ -46,7 +46,7 @@ def read(fname): 'Programming Language :: Python :: 3.6', ], - install_requires=['sagemaker-inference==1.0.0'], + install_requires=['sagemaker-inference==1.0.1'], extras_require={ 'test': ['tox', 'flake8', 'pytest', 'pytest-cov', 'pytest-xdist', 'mock', 'sagemaker==1.23.0', 'docker-compose', 'mxnet==1.4.0', 'awslogs'] diff --git a/test/integration/local/test_default_model_fn.py b/test/integration/local/test_default_model_fn.py index dd6eb70..701ee85 100644 --- a/test/integration/local/test_default_model_fn.py +++ b/test/integration/local/test_default_model_fn.py @@ -39,7 +39,7 @@ def predictor(docker_image, sagemaker_local_session, local_instance_type): predictor = model.deploy(1, local_instance_type) yield predictor finally: - sagemaker_local_session.delete_endpoint(model.endpoint_name) + predictor.delete_endpoint() def test_default_model_fn(predictor): diff --git a/test/integration/local/test_gluon_hosting.py b/test/integration/local/test_gluon_hosting.py index ff967dc..f9f95fe 100644 --- a/test/integration/local/test_gluon_hosting.py +++ b/test/integration/local/test_gluon_hosting.py @@ -42,4 +42,4 @@ def test_gluon_hosting(docker_image, sagemaker_local_session, local_instance_typ output = predictor.predict(input) assert [4.0] == output finally: - sagemaker_local_session.delete_endpoint(model.endpoint_name) + predictor.delete_endpoint() diff --git a/test/integration/local/test_hosting.py b/test/integration/local/test_hosting.py index 6dd8887..e9f7ca5 100644 --- a/test/integration/local/test_hosting.py +++ b/test/integration/local/test_hosting.py @@ -12,10 +12,10 @@ # permissions and limitations under the License. 
from __future__ import absolute_import -import json import os from sagemaker.mxnet.model import MXNetModel +from sagemaker.predictor import StringDeserializer import local_mode_utils from test.integration import RESOURCE_PATH @@ -34,12 +34,16 @@ def test_hosting(docker_image, sagemaker_local_session, local_instance_type): image=docker_image, sagemaker_session=sagemaker_local_session) - input = json.dumps({'some': 'json'}) - with local_mode_utils.lock(): try: predictor = model.deploy(1, local_instance_type) + predictor.serializer = None + predictor.deserializer = StringDeserializer() + predictor.accept = None + predictor.content_type = None + + input = 'some data' output = predictor.predict(input) assert input == output finally: - sagemaker_local_session.delete_endpoint(model.endpoint_name) + predictor.delete_endpoint() diff --git a/test/integration/local/test_onnx.py b/test/integration/local/test_onnx.py index 8337177..1741cd4 100644 --- a/test/integration/local/test_onnx.py +++ b/test/integration/local/test_onnx.py @@ -39,7 +39,7 @@ def test_onnx_import(docker_image, sagemaker_local_session, local_instance_type) predictor = model.deploy(1, local_instance_type) output = predictor.predict(input) finally: - sagemaker_local_session.delete_endpoint(model.endpoint_name) + predictor.delete_endpoint() # Check that there is a probability for each possible class in the prediction assert len(output[0]) == 10 diff --git a/tox.ini b/tox.ini index b3c9358..2f0b69d 100644 --- a/tox.ini +++ b/tox.ini @@ -36,6 +36,12 @@ ignore = require-code = True [testenv] +passenv = + AWS_ACCESS_KEY_ID + AWS_SECRET_ACCESS_KEY + AWS_SESSION_TOKEN + AWS_CONTAINER_CREDENTIALS_RELATIVE_URI + AWS_DEFAULT_REGION # {posargs} can be passed in by additional arguments specified when invoking tox. # Can be used to specify which tests to run, e.g.: tox -- -s commands =