Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

change: add release buildspec #30

Merged
merged 2 commits into from
Jun 27, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# Changelog

## v1.0.0

Initial commit
10 changes: 10 additions & 0 deletions MANIFEST.in
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
# Source-distribution manifest (consumed by `python setup.py sdist`).
recursive-include src/sagemaker_mxnet_serving_container *

include VERSION
include LICENSE
include README.rst

# keep the test suite out of the pip archive
prune test

# exclude compiled bytecode everywhere
recursive-exclude * __pycache__
recursive-exclude * *.py[co]
1 change: 1 addition & 0 deletions VERSION
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
1.0.1.dev0
123 changes: 123 additions & 0 deletions buildspec-release.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,123 @@
# CodeBuild buildspec for the release pipeline of the SageMaker MXNet serving
# container: prepares the release, lints and unit-tests the code, builds and
# publishes the Docker images, runs local / SageMaker / Elastic Inference
# integration tests, records deployment metadata, and publishes the release.
version: 0.2

env:
  variables:
    # MXNet version the images are built and tested for.
    FRAMEWORK_FULL_VERSION: '1.4.0'
    FRAMEWORK_SHORT_VERSION: '1.4'
    AWS_DEFAULT_REGION: 'us-west-2'
    # ECR repository name; EIA images go to "<repo>-eia".
    ECR_REPO: 'sagemaker-mxnet-serving'
    GITHUB_REPO: 'sagemaker-mxnet-serving-container'
    EI_ACCELERATOR_TYPE: 'ml.eia1.medium'
    GPU_INSTANCE_TYPE: 'p2.xlarge'
    # Bootstrap script written to the remote GPU instance; the \n escapes
    # below are expanded by the `printf` command in the build phase.
    SETUP_FILE: 'setup_cmds.sh'
    SETUP_CMDS: '#!/bin/bash\npip install --upgrade pip\npip install -U -e .\npip install -U -e .[test]'

phases:
  pre_build:
    commands:
      - start-dockerd

      # AWS account id of the build role; used to construct ECR image URIs.
      - ACCOUNT=$(aws sts get-caller-identity --query 'Account' --output text)

  build:
    commands:
      # prepare the release (update versions, changelog etc.)
      - git-release --prepare --min-version 1.0.1

      # run linter
      - tox -e flake8

      # run unit tests
      - tox -e py27,py36 test/unit

      # create pip archive (tar name will be something like sagemaker_mxnet_serving_container-1.0.0.tar.gz, but the Dockerfiles expect sagemaker_mxnet_serving_container.tar.gz)
      - python3 setup.py sdist
      - mv dist/sagemaker_mxnet_serving_container-*.tar.gz dist/sagemaker_mxnet_serving_container.tar.gz

      # build images
      - python3 scripts/build_all.py --version $FRAMEWORK_FULL_VERSION --account $ACCOUNT --repo $ECR_REPO

      # run cpu integration tests
      - IGNORE_COVERAGE=- tox -e py36 -- test/integration/local --py-version 3 --processor cpu --framework-version $FRAMEWORK_FULL_VERSION --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO
      - IGNORE_COVERAGE=- tox -e py36 -- test/integration/local --py-version 2 --processor cpu --framework-version $FRAMEWORK_FULL_VERSION --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO

      # push docker images to ECR
      - python3 scripts/publish_all.py --version $FRAMEWORK_FULL_VERSION --account $ACCOUNT --repo $ECR_REPO

      # launch remote gpu instance
      - create-key-pair
      - launch-ec2-instance --instance-type $GPU_INSTANCE_TYPE --ami-name dlami-ubuntu

      # run gpu integration tests
      - printf "$SETUP_CMDS" > $SETUP_FILE
      - ecr_image="$ACCOUNT.dkr.ecr.$AWS_DEFAULT_REGION.amazonaws.com/$ECR_REPO"
      - cmd="IGNORE_COVERAGE=- tox -e py36 -- test/integration/local --processor gpu --framework-version $FRAMEWORK_FULL_VERSION --region $AWS_DEFAULT_REGION --docker-base-name $ecr_image"
      - remote-test --test-cmd "$cmd --py-version 3" --github-repo $GITHUB_REPO --branch master --setup-file $SETUP_FILE
      - remote-test --test-cmd "$cmd --py-version 2" --github-repo $GITHUB_REPO --branch master --skip-setup

      # run sagemaker integration tests
      - IGNORE_COVERAGE=- tox -e py36 -- test/integration/sagemaker -n 4 --py-version 3 --processor cpu --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO --aws-id $ACCOUNT --framework-version $FRAMEWORK_FULL_VERSION
      - IGNORE_COVERAGE=- tox -e py36 -- test/integration/sagemaker -n 4 --py-version 3 --processor gpu --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO --aws-id $ACCOUNT --framework-version $FRAMEWORK_FULL_VERSION
      - IGNORE_COVERAGE=- tox -e py36 -- test/integration/sagemaker -n 4 --py-version 2 --processor cpu --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO --aws-id $ACCOUNT --framework-version $FRAMEWORK_FULL_VERSION
      - IGNORE_COVERAGE=- tox -e py36 -- test/integration/sagemaker -n 4 --py-version 2 --processor gpu --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO --aws-id $ACCOUNT --framework-version $FRAMEWORK_FULL_VERSION

      # run ei tests
      - IGNORE_COVERAGE=- tox -e py36 -- test/integration/sagemaker/test_elastic_inference.py --py-version 3 --processor cpu --accelerator-type $EI_ACCELERATOR_TYPE --region $AWS_DEFAULT_REGION --docker-base-name "$ECR_REPO-eia" --aws-id $ACCOUNT --framework-version $FRAMEWORK_FULL_VERSION
      - IGNORE_COVERAGE=- tox -e py36 -- test/integration/sagemaker/test_elastic_inference.py --py-version 2 --processor cpu --accelerator-type $EI_ACCELERATOR_TYPE --region $AWS_DEFAULT_REGION --docker-base-name "$ECR_REPO-eia" --aws-id $ACCOUNT --framework-version $FRAMEWORK_FULL_VERSION

      # write deployment details to file
      # (the literal block below is the emitted JSON verbatim -- do not add
      # comments inside it; they would become part of the file contents)
      - |
        echo '[{
        "repository": "'$ECR_REPO'",
        "tags": [{
        "source": "'$FRAMEWORK_FULL_VERSION'-cpu-py2",
        "dest": ["'$FRAMEWORK_FULL_VERSION'-cpu-py2", "'$FRAMEWORK_SHORT_VERSION'-cpu-py2", "'$FRAMEWORK_FULL_VERSION'-cpu-py2-'${CODEBUILD_BUILD_ID#*:}'"]
        },{
        "source": "'$FRAMEWORK_FULL_VERSION'-gpu-py2",
        "dest": ["'$FRAMEWORK_FULL_VERSION'-gpu-py2", "'$FRAMEWORK_SHORT_VERSION'-gpu-py2", "'$FRAMEWORK_FULL_VERSION'-gpu-py2-'${CODEBUILD_BUILD_ID#*:}'"]
        },{
        "source": "'$FRAMEWORK_FULL_VERSION'-cpu-py3",
        "dest": ["'$FRAMEWORK_FULL_VERSION'-cpu-py3", "'$FRAMEWORK_SHORT_VERSION'-cpu-py3", "'$FRAMEWORK_FULL_VERSION'-cpu-py3-'${CODEBUILD_BUILD_ID#*:}'"]
        },{
        "source": "'$FRAMEWORK_FULL_VERSION'-gpu-py3",
        "dest": ["'$FRAMEWORK_FULL_VERSION'-gpu-py3", "'$FRAMEWORK_SHORT_VERSION'-gpu-py3", "'$FRAMEWORK_FULL_VERSION'-gpu-py3-'${CODEBUILD_BUILD_ID#*:}'"]
        }],
        "test": [
        "IGNORE_COVERAGE=- tox -e py36 -- test/integration/sagemaker/test_hosting.py --py-version 3 --processor cpu --region {region} --docker-base-name '$ECR_REPO' --aws-id '$ACCOUNT' --framework-version '$FRAMEWORK_FULL_VERSION'",
        "IGNORE_COVERAGE=- tox -e py36 -- test/integration/sagemaker/test_hosting.py --py-version 3 --processor gpu --region {region} --docker-base-name '$ECR_REPO' --aws-id '$ACCOUNT' --framework-version '$FRAMEWORK_FULL_VERSION'",
        "IGNORE_COVERAGE=- tox -e py36 -- test/integration/sagemaker/test_hosting.py --py-version 2 --processor cpu --region {region} --docker-base-name '$ECR_REPO' --aws-id '$ACCOUNT' --framework-version '$FRAMEWORK_FULL_VERSION'",
        "IGNORE_COVERAGE=- tox -e py36 -- test/integration/sagemaker/test_hosting.py --py-version 2 --processor gpu --region {region} --docker-base-name '$ECR_REPO' --aws-id '$ACCOUNT' --framework-version '$FRAMEWORK_FULL_VERSION'"
        ]
        }, {
        "repository": "'$ECR_REPO'-eia",
        "tags": [{
        "source": "'$FRAMEWORK_FULL_VERSION'-cpu-py2",
        "dest": ["'$FRAMEWORK_FULL_VERSION'-cpu-py2", "'$FRAMEWORK_SHORT_VERSION'-cpu-py2", "'$FRAMEWORK_FULL_VERSION'-cpu-py2-'${CODEBUILD_BUILD_ID#*:}'"]
        },{
        "source": "'$FRAMEWORK_FULL_VERSION'-gpu-py2",
        "dest": ["'$FRAMEWORK_FULL_VERSION'-gpu-py2", "'$FRAMEWORK_SHORT_VERSION'-gpu-py2", "'$FRAMEWORK_FULL_VERSION'-gpu-py2-'${CODEBUILD_BUILD_ID#*:}'"]
        },{
        "source": "'$FRAMEWORK_FULL_VERSION'-cpu-py3",
        "dest": ["'$FRAMEWORK_FULL_VERSION'-cpu-py3", "'$FRAMEWORK_SHORT_VERSION'-cpu-py3", "'$FRAMEWORK_FULL_VERSION'-cpu-py3-'${CODEBUILD_BUILD_ID#*:}'"]
        },{
        "source": "'$FRAMEWORK_FULL_VERSION'-gpu-py3",
        "dest": ["'$FRAMEWORK_FULL_VERSION'-gpu-py3", "'$FRAMEWORK_SHORT_VERSION'-gpu-py3", "'$FRAMEWORK_FULL_VERSION'-gpu-py3-'${CODEBUILD_BUILD_ID#*:}'"]
        }],
        "test": [
        "IGNORE_COVERAGE=- tox -e py36 -- test/integration/sagemaker/test_elastic_inference.py --py-version 3 --processor cpu --accelerator-type '$EI_ACCELERATOR_TYPE' --region {region} --docker-base-name '$ECR_REPO'-eia --aws-id '$ACCOUNT' --framework-version '$FRAMEWORK_FULL_VERSION'",
        "IGNORE_COVERAGE=- tox -e py36 -- test/integration/sagemaker/test_elastic_inference.py --py-version 2 --processor cpu --accelerator-type '$EI_ACCELERATOR_TYPE' --region {region} --docker-base-name '$ECR_REPO'-eia --aws-id '$ACCOUNT' --framework-version '$FRAMEWORK_FULL_VERSION'"
        ]
        }]' > deployments.json

      # publish the release to github
      - git-release --publish

    finally:
      # shut down remote gpu instance
      - cleanup-gpu-instances
      - cleanup-key-pairs

artifacts:
  files:
    - deployments.json
  name: ARTIFACT_1
6 changes: 3 additions & 3 deletions docker/1.4.0/final/Dockerfile.cpu
Original file line number Diff line number Diff line change
Expand Up @@ -12,13 +12,13 @@ RUN apt-get update && \

WORKDIR /

COPY dist/sagemaker_mxnet_serving_container-1.0.0.tar.gz /sagemaker_mxnet_serving_container-1.0.0.tar.gz
COPY dist/sagemaker_mxnet_serving_container.tar.gz /sagemaker_mxnet_serving_container.tar.gz

RUN pip install --no-cache mxnet-mkl==1.4.0 \
keras-mxnet==2.2.4.1 \
onnx==1.4.1 \
/sagemaker_mxnet_serving_container-1.0.0.tar.gz && \
rm /sagemaker_mxnet_serving_container-1.0.0.tar.gz
/sagemaker_mxnet_serving_container.tar.gz && \
rm /sagemaker_mxnet_serving_container.tar.gz

# This is here to make our installed version of OpenCV work.
# https://stackoverflow.com/questions/29274638/opencv-libdc1394-error-failed-to-initialize-libdc1394
Expand Down
6 changes: 3 additions & 3 deletions docker/1.4.0/final/Dockerfile.eia
Original file line number Diff line number Diff line change
Expand Up @@ -12,13 +12,13 @@ RUN apt-get update && \

WORKDIR /

COPY dist/sagemaker_mxnet_serving_container-1.0.0.tar.gz /sagemaker_mxnet_serving_container-1.0.0.tar.gz
COPY dist/sagemaker_mxnet_serving_container.tar.gz /sagemaker_mxnet_serving_container.tar.gz

RUN pip install --no-cache https://s3.amazonaws.com/amazonei-apachemxnet/amazonei_mxnet-1.4.0-py2.py3-none-manylinux1_x86_64.whl \
keras-mxnet==2.2.4.1 \
onnx==1.4.1 \
/sagemaker_mxnet_serving_container-1.0.0.tar.gz && \
rm /sagemaker_mxnet_serving_container-1.0.0.tar.gz
/sagemaker_mxnet_serving_container.tar.gz && \
rm /sagemaker_mxnet_serving_container.tar.gz

# This is here to make our installed version of OpenCV work.
# https://stackoverflow.com/questions/29274638/opencv-libdc1394-error-failed-to-initialize-libdc1394
Expand Down
6 changes: 3 additions & 3 deletions docker/1.4.0/final/Dockerfile.gpu
Original file line number Diff line number Diff line change
Expand Up @@ -12,13 +12,13 @@ RUN apt-get update && \

WORKDIR /

COPY dist/sagemaker_mxnet_serving_container-1.0.0.tar.gz /sagemaker_mxnet_serving_container-1.0.0.tar.gz
COPY dist/sagemaker_mxnet_serving_container.tar.gz /sagemaker_mxnet_serving_container.tar.gz

RUN pip install --no-cache mxnet-cu92mkl==1.4.0 \
keras-mxnet==2.2.4.1 \
onnx==1.4.1 \
/sagemaker_mxnet_serving_container-1.0.0.tar.gz && \
rm /sagemaker_mxnet_serving_container-1.0.0.tar.gz
/sagemaker_mxnet_serving_container.tar.gz && \
rm /sagemaker_mxnet_serving_container.tar.gz

# This is here to make our installed version of OpenCV work.
# https://stackoverflow.com/questions/29274638/opencv-libdc1394-error-failed-to-initialize-libdc1394
Expand Down
62 changes: 62 additions & 0 deletions scripts/build_all.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from __future__ import absolute_import

import argparse
import os
import subprocess

DEFAULT_REGION = 'us-west-2'


def _parse_args():
parser = argparse.ArgumentParser()

parser.add_argument('--account')
parser.add_argument('--version')
parser.add_argument('--repo')
parser.add_argument('--region', default=DEFAULT_REGION)

return parser.parse_args()


args = _parse_args()

# Dockerfiles for the requested framework version live in docker/<version>/final.
build_dir = os.path.join('docker', args.version, 'final')

# Log in to ECR so `docker build --cache-from` can pull the previously
# published images. BUG FIX: the original print used
# 'Executing docker login command: '.format(login_cmd) -- the format string
# had no '{}' placeholder, so the argument was silently dropped. We do NOT
# add the placeholder: the `aws ecr get-login` output embeds a temporary
# registry password, which must not be echoed into the build log.
get_login_cmd = 'aws ecr get-login --no-include-email --region {} --registry-id {}'.format(args.region, args.account)
login_cmd = subprocess.check_output(get_login_cmd.split()).decode('utf-8').strip()
print('Logging in to ECR registry for account {} in {}'.format(args.account, args.region))
subprocess.check_call(login_cmd.split())

for arch in ('cpu', 'gpu', 'eia'):
    for py_version in ('2.7', '3.6'):
        # EIA images are CPU-based, so they share the "cpu" tag component but
        # are built from Dockerfile.eia and tagged for the "<repo>-eia" repo.
        tag_arch = 'cpu' if arch == 'eia' else arch
        tag = '{}-{}-py{}'.format(args.version, tag_arch, py_version[0])
        repo = '{}-eia'.format(args.repo) if arch == 'eia' else args.repo
        dest = '{}:{}'.format(repo, tag)

        # Last published image for this tag, used as a layer cache.
        prev_image_uri = '{}.dkr.ecr.{}.amazonaws.com/{}'.format(args.account, args.region, dest)
        dockerfile = os.path.join(build_dir, 'Dockerfile.{}'.format(arch))

        build_cmd = [
            'docker', 'build',
            '-f', dockerfile,
            '--cache-from', prev_image_uri,
            '--build-arg', 'py_version={}'.format(py_version),
            '-t', dest,
            '.',
        ]
        print('Building docker image: {}'.format(' '.join(build_cmd)))
        subprocess.check_call(build_cmd)
53 changes: 53 additions & 0 deletions scripts/publish_all.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from __future__ import absolute_import

import argparse
import subprocess

DEFAULT_REGION = 'us-west-2'


def _parse_args():
parser = argparse.ArgumentParser()

parser.add_argument('--account')
parser.add_argument('--version')
parser.add_argument('--repo')
parser.add_argument('--region', default=DEFAULT_REGION)

return parser.parse_args()


args = _parse_args()

# Authenticate with ECR once, up front. The login is identical for every
# image (same account/region), so there is no need to re-run it inside the
# loop as before. SECURITY FIX: the original printed the full login command,
# which embeds the temporary registry password returned by
# `aws ecr get-login`, into the build log -- we log a redacted message instead.
login_cmd = subprocess.check_output(
    'aws ecr get-login --no-include-email --registry-id {} --region {}'
    .format(args.account, args.region).split()).decode('utf-8').strip()
print('Logging in to ECR registry for account {} in {}'.format(args.account, args.region))
subprocess.check_call(login_cmd.split())

for arch in ('cpu', 'gpu', 'eia'):
    for py_version in ('2', '3'):
        # EIA images are CPU-based ("cpu" tag component) but live in a
        # separate "<repo>-eia" repository.
        repo = '{}-eia'.format(args.repo) if arch == 'eia' else args.repo
        tag_arch = 'cpu' if arch == 'eia' else arch
        source = '{}:{}-{}-py{}'.format(repo, args.version, tag_arch, py_version)
        dest = '{}.dkr.ecr.{}.amazonaws.com/{}'.format(args.account, args.region, source)

        tag_cmd = 'docker tag {} {}'.format(source, dest)
        print('Tagging image: {}'.format(tag_cmd))
        subprocess.check_call(tag_cmd.split())

        push_cmd = 'docker push {}'.format(dest)
        print('Pushing image: {}'.format(push_cmd))
        subprocess.check_call(push_cmd.split())
4 changes: 2 additions & 2 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ def read(fname):

setup(
name='sagemaker_mxnet_serving_container',
version='1.0.0',
version=read('VERSION').strip(),
description='Open source library for creating MXNet containers for serving on SageMaker.',

packages=find_packages(where='src', exclude=('test',)),
Expand All @@ -46,7 +46,7 @@ def read(fname):
'Programming Language :: Python :: 3.6',
],

install_requires=['sagemaker-inference==1.0.0'],
install_requires=['sagemaker-inference==1.0.1'],
extras_require={
'test': ['tox', 'flake8', 'pytest', 'pytest-cov', 'pytest-xdist', 'mock',
'sagemaker==1.23.0', 'docker-compose', 'mxnet==1.4.0', 'awslogs']
Expand Down
2 changes: 1 addition & 1 deletion test/integration/local/test_default_model_fn.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ def predictor(docker_image, sagemaker_local_session, local_instance_type):
predictor = model.deploy(1, local_instance_type)
yield predictor
finally:
sagemaker_local_session.delete_endpoint(model.endpoint_name)
predictor.delete_endpoint()


def test_default_model_fn(predictor):
Expand Down
2 changes: 1 addition & 1 deletion test/integration/local/test_gluon_hosting.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,4 +42,4 @@ def test_gluon_hosting(docker_image, sagemaker_local_session, local_instance_typ
output = predictor.predict(input)
assert [4.0] == output
finally:
sagemaker_local_session.delete_endpoint(model.endpoint_name)
predictor.delete_endpoint()
12 changes: 8 additions & 4 deletions test/integration/local/test_hosting.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,10 +12,10 @@
# permissions and limitations under the License.
from __future__ import absolute_import

import json
import os

from sagemaker.mxnet.model import MXNetModel
from sagemaker.predictor import StringDeserializer

import local_mode_utils
from test.integration import RESOURCE_PATH
Expand All @@ -34,12 +34,16 @@ def test_hosting(docker_image, sagemaker_local_session, local_instance_type):
image=docker_image,
sagemaker_session=sagemaker_local_session)

input = json.dumps({'some': 'json'})

with local_mode_utils.lock():
try:
predictor = model.deploy(1, local_instance_type)
predictor.serializer = None
predictor.deserializer = StringDeserializer()
predictor.accept = None
predictor.content_type = None

input = 'some data'
output = predictor.predict(input)
assert input == output
finally:
sagemaker_local_session.delete_endpoint(model.endpoint_name)
predictor.delete_endpoint()
2 changes: 1 addition & 1 deletion test/integration/local/test_onnx.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ def test_onnx_import(docker_image, sagemaker_local_session, local_instance_type)
predictor = model.deploy(1, local_instance_type)
output = predictor.predict(input)
finally:
sagemaker_local_session.delete_endpoint(model.endpoint_name)
predictor.delete_endpoint()

# Check that there is a probability for each possible class in the prediction
assert len(output[0]) == 10
6 changes: 6 additions & 0 deletions tox.ini
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,12 @@ ignore =
require-code = True

[testenv]
passenv =
AWS_ACCESS_KEY_ID
AWS_SECRET_ACCESS_KEY
AWS_SESSION_TOKEN
AWS_CONTAINER_CREDENTIALS_RELATIVE_URI
AWS_DEFAULT_REGION
# {posargs} can be passed in by additional arguments specified when invoking tox.
# Can be used to specify which tests to run, e.g.: tox -- -s
commands =
Expand Down