From 791eabb60dafe13538d4097384fdee7ea74cfe55 Mon Sep 17 00:00:00 2001 From: Brett Swift Date: Tue, 23 Oct 2018 08:28:14 -0600 Subject: [PATCH] Enable optional S3 bucket creation This enables pipelines to create their own, and gives more flexibility to the release cycle when using tooling that doesn't clear out s3 buckets for you. Also allows you to use a bucket from another account. --- cumulus/steps/dev_tools/__init__.py | 2 +- cumulus/steps/dev_tools/code_build_action.py | 4 +- cumulus/steps/dev_tools/pipeline.py | 51 +++++++++++-------- cumulus/steps/storage/__init__.py | 0 cumulus/steps/storage/s3bucket.py | 45 ++++++++++++++++ .../blueprints/pipeline_simple.py | 22 ++++++-- tests/stacker_test/conf/acceptance.env | 5 ++ tests/stacker_test/run-integration.sh | 20 ++++---- 8 files changed, 111 insertions(+), 38 deletions(-) create mode 100644 cumulus/steps/storage/__init__.py create mode 100644 cumulus/steps/storage/s3bucket.py diff --git a/cumulus/steps/dev_tools/__init__.py b/cumulus/steps/dev_tools/__init__.py index 952fdd2..a84284b 100644 --- a/cumulus/steps/dev_tools/__init__.py +++ b/cumulus/steps/dev_tools/__init__.py @@ -1,4 +1,4 @@ -META_PIPELINE_BUCKET_REF = 'dev_tools-bucket-Ref' +META_PIPELINE_BUCKET_NAME = 'dev_tools-pipeline-bucket-name' # Use this to get a Ref object to gain access to the pipeline bucket META_PIPELINE_BUCKET_POLICY_REF = 'pipeline-bucket-access-policy-Ref' diff --git a/cumulus/steps/dev_tools/code_build_action.py b/cumulus/steps/dev_tools/code_build_action.py index 77923d4..4d04068 100644 --- a/cumulus/steps/dev_tools/code_build_action.py +++ b/cumulus/steps/dev_tools/code_build_action.py @@ -19,7 +19,7 @@ from cumulus.chain import step from cumulus.steps.dev_tools import META_PIPELINE_BUCKET_POLICY_REF, \ - META_PIPELINE_BUCKET_REF + META_PIPELINE_BUCKET_NAME class CodeBuildAction(step.Step): @@ -83,7 +83,7 @@ def handle(self, chain_context): Type='LINUX_CONTAINER', EnvironmentVariables=[ # TODO: allow these to be injectable, or 
just the whole environment? - {'Name': 'PIPELINE_BUCKET', 'Value': chain_context.metadata[META_PIPELINE_BUCKET_REF]} + {'Name': 'PIPELINE_BUCKET', 'Value': chain_context.metadata[META_PIPELINE_BUCKET_NAME]} ], ) diff --git a/cumulus/steps/dev_tools/pipeline.py b/cumulus/steps/dev_tools/pipeline.py index 7f56a17..64a4c7f 100644 --- a/cumulus/steps/dev_tools/pipeline.py +++ b/cumulus/steps/dev_tools/pipeline.py @@ -23,20 +23,23 @@ class Pipeline(step.Step): def __init__(self, name, bucket_name, + create_bucket=True, pipeline_policies=None, bucket_policy_statements=None, bucket_kms_key_arn=None, ): """ + :type create_bucket: bool if False, will not create the bucket. Will attach policies either way. + :type bucket_name: the name of the bucket that will be created suffixed with the chaincontext instance name :type bucket_policy_statements: [awacs.aws.Statement] - :type bucket: troposphere.s3.Bucket :type pipeline_policies: [troposphere.iam.Policy] - :type bucket_name: the name of the bucket that will be created suffixed with the chaincontext instance name + :type bucket_kms_key_arn: ARN used to decrypt the pipeline artifacts """ step.Step.__init__(self) self.name = name self.bucket_name = bucket_name + self.create_bucket = create_bucket self.bucket_policy_statements = bucket_policy_statements self.pipeline_policies = pipeline_policies or [] self.bucket_kms_key_arn = bucket_kms_key_arn @@ -50,20 +53,22 @@ def handle(self, chain_context): :param chain_context: :return: """ - # TODO: let (force?) bucket to be injected. 
- pipeline_bucket = Bucket( - "PipelineBucket%s" % self.name, - BucketName=self.bucket_name, - VersioningConfiguration=VersioningConfiguration( - Status="Enabled" + + if self.create_bucket: + pipeline_bucket = Bucket( + "PipelineBucket%s" % chain_context.instance_name, + BucketName=self.bucket_name, + VersioningConfiguration=VersioningConfiguration( + Status="Enabled" + ) ) - ) + chain_context.template.add_resource(pipeline_bucket) - default_bucket_policies = self.get_default_bucket_policy_statements(pipeline_bucket) + default_bucket_policies = self.get_default_bucket_policy_statements(self.bucket_name) if self.bucket_policy_statements: bucket_access_policy = self.get_bucket_policy( - pipeline_bucket=pipeline_bucket, + pipeline_bucket=self.bucket_name, bucket_policy_statements=self.bucket_policy_statements, ) chain_context.template.add_resource(bucket_access_policy) @@ -78,9 +83,7 @@ def handle(self, chain_context): ) ) - chain_context.template.add_resource(pipeline_bucket_access_policy) - # pipeline_bucket could be a string or Join object.. unit test this. 
- chain_context.metadata[cumulus.steps.dev_tools.META_PIPELINE_BUCKET_REF] = Ref(pipeline_bucket) + chain_context.metadata[cumulus.steps.dev_tools.META_PIPELINE_BUCKET_NAME] = self.bucket_name chain_context.metadata[cumulus.steps.dev_tools.META_PIPELINE_BUCKET_POLICY_REF] = Ref( pipeline_bucket_access_policy) @@ -98,12 +101,12 @@ def handle(self, chain_context): Resource=[ troposphere.Join('', [ awacs.s3.ARN(), - Ref(pipeline_bucket), + self.bucket_name, "/*" ]), troposphere.Join('', [ awacs.s3.ARN(), - Ref(pipeline_bucket), + self.bucket_name, ]), ], ), @@ -173,7 +176,7 @@ def handle(self, chain_context): Stages=[], ArtifactStore=codepipeline.ArtifactStore( Type="S3", - Location=Ref(pipeline_bucket), + Location=self.bucket_name, ) # TODO: optionally add kms key here ) @@ -191,11 +194,17 @@ def handle(self, chain_context): Description="Code Pipeline", Value=Ref(generic_pipeline), ) + pipeline_bucket_output = troposphere.Output( + "PipelineBucket", + Description="Name of the input artifact bucket for the pipeline", + Value=self.bucket_name, + ) - chain_context.template.add_resource(pipeline_bucket) + chain_context.template.add_resource(pipeline_bucket_access_policy) chain_context.template.add_resource(pipeline_service_role) chain_context.template.add_resource(generic_pipeline) chain_context.template.add_output(pipeline_output) + chain_context.template.add_output(pipeline_bucket_output) def get_default_bucket_policy_statements(self, pipeline_bucket): bucket_policy_statements = [ @@ -208,7 +217,7 @@ def get_default_bucket_policy_statements(self, pipeline_bucket): Resource=[ troposphere.Join('', [ awacs.s3.ARN(), - Ref(pipeline_bucket), + pipeline_bucket, ]), ], ), @@ -236,7 +245,7 @@ def get_default_bucket_policy_statements(self, pipeline_bucket): Resource=[ troposphere.Join('', [ awacs.s3.ARN(), - Ref(pipeline_bucket), + pipeline_bucket, '/*' ]), ], @@ -248,7 +257,7 @@ def get_default_bucket_policy_statements(self, pipeline_bucket): def get_bucket_policy(self, 
pipeline_bucket, bucket_policy_statements): policy = troposphere.s3.BucketPolicy( "PipelineBucketPolicy", - Bucket=troposphere.Ref(pipeline_bucket), + Bucket=pipeline_bucket, PolicyDocument=awacs.aws.Policy( Statement=bucket_policy_statements, ), diff --git a/cumulus/steps/storage/__init__.py b/cumulus/steps/storage/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/cumulus/steps/storage/s3bucket.py b/cumulus/steps/storage/s3bucket.py new file mode 100644 index 0000000..22f4991 --- /dev/null +++ b/cumulus/steps/storage/s3bucket.py @@ -0,0 +1,45 @@ +from troposphere.s3 import Bucket, VersioningConfiguration + +from cumulus.chain import step + + +class S3Bucket(step.Step): + def __init__(self, + logical_name, + bucket_name, + # bucket_policy_statements=None, + ): + """ + + :type bucket_name: the name of the bucket that will be created suffixed with the chaincontext instance name + :type bucket_policy_statements: [awacs.aws.Statement] + """ + step.Step.__init__(self) + self.logical_name = logical_name + self.bucket_name = bucket_name + # TODO: this property is a vestigial one from when this was ripped out of the pipeline, + # however, leaving it here as it is surely useful if you want to just create a bucket + # with some policies. + # self.bucket_policy_statements = bucket_policy_statements + + def handle(self, chain_context): + """ + This step creates a versioned S3 bucket. 
+ * s3 bucket + * policies for the bucket and pipeline + * your next step in the chain MUST be a source stage + :param chain_context: + :return: + """ + + bucket = Bucket( + self.logical_name, + BucketName=self.bucket_name, + VersioningConfiguration=VersioningConfiguration( + Status="Enabled" + ) + ) + + chain_context.template.add_resource(bucket) + + print("Added bucket: " + self.logical_name) diff --git a/tests/stacker_test/blueprints/pipeline_simple.py b/tests/stacker_test/blueprints/pipeline_simple.py index 6c516b9..88625da 100644 --- a/tests/stacker_test/blueprints/pipeline_simple.py +++ b/tests/stacker_test/blueprints/pipeline_simple.py @@ -1,4 +1,5 @@ import troposphere +from cumulus.steps.storage.s3bucket import S3Bucket from stacker.blueprints.base import Blueprint import troposphere.codebuild @@ -17,7 +18,6 @@ class PipelineSimple(Blueprint): } def create_template(self): - t = self.template t.add_description("Acceptance Tests for cumulus pipelines") @@ -34,10 +34,24 @@ def create_template(self): "automatedtests" ]) - the_chain.add(pipeline.Pipeline( + bucket = S3Bucket( + logical_name="PipelineBucket", + bucket_name=pipeline_bucket_name, + ) + # expected + # cumulus-acc-964705782699-automatedtests + # actual + # acc-964705782699-automatedtests + + the_chain.add(bucket) + + the_pipeline = pipeline.Pipeline( name=self.name, bucket_name=pipeline_bucket_name, - )) + create_bucket=False, + ) + + the_chain.add(the_pipeline) source_stage_name = "SourceStage" deploy_stage_name = "DeployStage" @@ -52,7 +66,7 @@ def create_template(self): action_name="MicroserviceSource", output_artifact_name=service_artifact, s3_bucket_name=pipeline_bucket_name, - s3_object_key="artifact.tar.gz" + s3_object_key="artifact.zip" ) ) diff --git a/tests/stacker_test/conf/acceptance.env b/tests/stacker_test/conf/acceptance.env index cc16579..4bade5b 100644 --- a/tests/stacker_test/conf/acceptance.env +++ b/tests/stacker_test/conf/acceptance.env @@ -2,3 +2,8 @@ # 
http://stacker.readthedocs.io/en/latest/environments.html namespace: acc env: ac +VpcId: vpc-894b89ef +BaseDomain: playpen.dsl.aws.shaw.ca +PrivateSubnets: subnet-7b8cba32,subnet-ed041b8a +SshKeyName: stc-admin-March-2017-PLAYPEN +ALBCertName: ${ssmstore us-west-2@/simpleweb/bswift/ALBCertName} diff --git a/tests/stacker_test/run-integration.sh b/tests/stacker_test/run-integration.sh index e10e0e9..8cf548e 100755 --- a/tests/stacker_test/run-integration.sh +++ b/tests/stacker_test/run-integration.sh @@ -1,30 +1,30 @@ #!/usr/bin/env bash +#set -x ACCOUNT_ID=`aws sts get-caller-identity | jq .Account | tr -d '"' ` NAMESPACE=acc # must match the namespace in the conf file -BUCKET="cumulus-${NAMESPACE}-${ACCOUNT_ID}-automatedtests" echo "Using account: ${ACCOUNT_ID}" -echo "Using bucket: ${BUCKET}" set -e #Important. Script will exit appropriately if there is an error. -stacker build conf/acceptance.env stacker.yaml --recreate-failed -t +stacker build conf/acceptance.env stacker.yaml --recreate-failed -t --stacks pipelinesimple -ARTIFACT_NAME='artifact.tar.gz' +BUCKET_NAME=$(stacker info conf/acceptance.env stacker.yaml 2>&1 | grep PipelineBucket: | cut -f 3 -d " ") +PIPELINE_NAME=$(stacker info conf/acceptance.env stacker.yaml 2>&1 | grep PipelineName | cut -f 3 -d " ") + +ARTIFACT_NAME='artifact.zip' TEMP_DIR='ac_build' pushd ../../ # move to main folder mkdir -p ${TEMP_DIR} -zip -r ${TEMP_DIR}/${ARTIFACT_NAME} ./ -x *.git* *./${TEMP_DIR}* *.eggs* *.idea* *.tox* +zip -r ${TEMP_DIR}/${ARTIFACT_NAME} ./ -x *.git* *./${TEMP_DIR}* *.eggs* *.egg-* *.idea* *.tox* *tests* *docs* -aws s3 cp ./${TEMP_DIR}/${ARTIFACT_NAME} s3://${BUCKET}/${ARTIFACT_NAME} +aws s3 cp ./${TEMP_DIR}/${ARTIFACT_NAME} s3://${BUCKET_NAME}/${ARTIFACT_NAME} rm -rf ${TEMP_DIR} popd # return to test folder -PIPELINE_NAME=$(stacker info conf/acceptance.env stacker.yaml 2>&1 | grep PipelineLogicalName | cut -f 3 -d " ") - echo "Pipeline deployment started for pipeline: ${PIPELINE_NAME}" # get shasum from 
expected and actual output. When they match we are at approval state @@ -60,8 +60,8 @@ done SHOULD_DESTROY=false if $SHOULD_DESTROY; then - aws s3 rm s3://${BUCKET} --recursive - python delete_bucket_versions.py ${BUCKET} + aws s3 rm s3://${BUCKET_NAME} --recursive + python delete_bucket_versions.py ${BUCKET_NAME} stacker destroy conf/acceptance.env stacker.yaml --force -t fi