Enable optional S3 bucket creation
This makes creation of the pipeline's S3 bucket optional, so the bucket
can be created as its own step. That gives more flexibility to the
release cycle when using tooling that doesn't clear out S3 buckets for
you, and also allows you to use a bucket from another account.
Brett Swift authored and Brett Swift committed Oct 23, 2018
1 parent 50dfa2e commit 58616c5
Showing 8 changed files with 110 additions and 38 deletions.
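
Taken together, the change lets a chain create the artifact bucket as its own step, or skip creation and point the pipeline at an existing bucket by name. A minimal sketch of the new usage, assuming a chain (the_chain) and a bucket name variable are already in scope:

    from cumulus.steps.dev_tools import pipeline
    from cumulus.steps.storage.s3bucket import S3Bucket

    # Hypothetical wiring: create the bucket as its own step...
    the_chain.add(S3Bucket(
        logical_name="PipelineBucket",
        bucket_name=artifact_bucket_name,
    ))

    # ...then tell the pipeline not to create one. The name can equally
    # refer to a pre-existing bucket, including one in another account.
    the_chain.add(pipeline.Pipeline(
        name="app",
        bucket_name=artifact_bucket_name,
        create_bucket=False,
    ))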
cumulus/steps/dev_tools/__init__.py (2 changes: 1 addition & 1 deletion)
@@ -1,4 +1,4 @@
-META_PIPELINE_BUCKET_REF = 'dev_tools-bucket-Ref'
+META_PIPELINE_BUCKET_NAME = 'dev_tools-pipeline-bucket-name'

# Use this to get a Ref object to gain access to the pipeline bucket
META_PIPELINE_BUCKET_POLICY_REF = 'pipeline-bucket-access-policy-Ref'
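Consumers still resolve the bucket through chain metadata; the value stored under the renamed key is now the plain bucket name rather than a Ref. A sketch, assuming a populated chain_context:

    from cumulus.steps.dev_tools import META_PIPELINE_BUCKET_NAME

    # The stored value is a plain string, so it works even when the bucket
    # was not created in the current template.
    bucket_name = chain_context.metadata[META_PIPELINE_BUCKET_NAME]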
cumulus/steps/dev_tools/code_build_action.py (4 changes: 2 additions & 2 deletions)
@@ -19,7 +19,7 @@
from cumulus.chain import step

from cumulus.steps.dev_tools import META_PIPELINE_BUCKET_POLICY_REF, \
-META_PIPELINE_BUCKET_REF
+META_PIPELINE_BUCKET_NAME


class CodeBuildAction(step.Step):
@@ -83,7 +83,7 @@ def handle(self, chain_context):
Type='LINUX_CONTAINER',
EnvironmentVariables=[
# TODO: allow these to be injectable, or just the whole environment?
-{'Name': 'PIPELINE_BUCKET', 'Value': chain_context.metadata[META_PIPELINE_BUCKET_REF]}
+{'Name': 'PIPELINE_BUCKET', 'Value': chain_context.metadata[META_PIPELINE_BUCKET_NAME]}
],
)

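Since PIPELINE_BUCKET now carries the literal bucket name instead of a resolved Ref, build steps can pass it straight to the S3 API. A hedged sketch of build-side usage (not part of this commit; file names hypothetical):

    import os

    import boto3  # assumed available in the CodeBuild image

    # PIPELINE_BUCKET is injected via EnvironmentVariables above.
    s3 = boto3.client("s3")
    s3.upload_file("dist/artifact.zip", os.environ["PIPELINE_BUCKET"], "artifact.zip")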
cumulus/steps/dev_tools/pipeline.py (50 changes: 29 additions & 21 deletions)
@@ -23,18 +23,20 @@ class Pipeline(step.Step):
def __init__(self,
name,
bucket_name,
+create_bucket=True,
pipeline_policies=None,
bucket_policy_statements=None,
bucket_kms_key_arn=None,
):
"""
+:type bucket_name: the name of the bucket that will be created suffixed with the chaincontext instance name
:type bucket_policy_statements: [awacs.aws.Statement]
-:type bucket: troposphere.s3.Bucket
:type pipeline_policies: [troposphere.iam.Policy]
:type bucket_name: the name of the bucket that will be created suffixed with the chaincontext instance name
:type bucket_kms_key_arn: ARN used to decrypt the pipeline artifacts
"""
step.Step.__init__(self)
+self.create_bucket = create_bucket
self.name = name
self.bucket_name = bucket_name
self.bucket_policy_statements = bucket_policy_statements
@@ -50,20 +52,22 @@ def handle(self, chain_context):
:param chain_context:
:return:
"""
-# TODO: let (force?) bucket to be injected.
-pipeline_bucket = Bucket(
-    "PipelineBucket%s" % self.name,
-    BucketName=self.bucket_name,
-    VersioningConfiguration=VersioningConfiguration(
-        Status="Enabled"

+if self.create_bucket:
+    pipeline_bucket = Bucket(
+        "PipelineBucket%s" % chain_context.instance_name,
+        BucketName=self.bucket_name,
+        VersioningConfiguration=VersioningConfiguration(
+            Status="Enabled"
)
)
)
+    chain_context.template.add_resource(pipeline_bucket)

-default_bucket_policies = self.get_default_bucket_policy_statements(pipeline_bucket)
+default_bucket_policies = self.get_default_bucket_policy_statements(self.bucket_name)

if self.bucket_policy_statements:
bucket_access_policy = self.get_bucket_policy(
-pipeline_bucket=pipeline_bucket,
+pipeline_bucket=self.bucket_name,
bucket_policy_statements=self.bucket_policy_statements,
)
chain_context.template.add_resource(bucket_access_policy)
@@ -78,9 +82,7 @@ def handle(self, chain_context):
)
)

-chain_context.template.add_resource(pipeline_bucket_access_policy)
-# pipeline_bucket could be a string or Join object.. unit test this.
-chain_context.metadata[cumulus.steps.dev_tools.META_PIPELINE_BUCKET_REF] = Ref(pipeline_bucket)
+chain_context.metadata[cumulus.steps.dev_tools.META_PIPELINE_BUCKET_NAME] = self.bucket_name
chain_context.metadata[cumulus.steps.dev_tools.META_PIPELINE_BUCKET_POLICY_REF] = Ref(
pipeline_bucket_access_policy)

@@ -98,12 +100,12 @@ def handle(self, chain_context):
Resource=[
troposphere.Join('', [
awacs.s3.ARN(),
-Ref(pipeline_bucket),
+self.bucket_name,
"/*"
]),
troposphere.Join('', [
awacs.s3.ARN(),
-Ref(pipeline_bucket),
+self.bucket_name,
]),
],
),
@@ -173,7 +175,7 @@ def handle(self, chain_context):
Stages=[],
ArtifactStore=codepipeline.ArtifactStore(
Type="S3",
-Location=Ref(pipeline_bucket),
+Location=self.bucket_name,
)
# TODO: optionally add kms key here
)
@@ -191,11 +193,17 @@ def handle(self, chain_context):
Description="Code Pipeline",
Value=Ref(generic_pipeline),
)
+pipeline_bucket_output = troposphere.Output(
+    "PipelineBucket",
+    Description="Name of the input artifact bucket for the pipeline",
+    Value=self.bucket_name,
+)

-chain_context.template.add_resource(pipeline_bucket)
+chain_context.template.add_resource(pipeline_bucket_access_policy)
chain_context.template.add_resource(pipeline_service_role)
chain_context.template.add_resource(generic_pipeline)
chain_context.template.add_output(pipeline_output)
+chain_context.template.add_output(pipeline_bucket_output)

def get_default_bucket_policy_statements(self, pipeline_bucket):
bucket_policy_statements = [
@@ -208,7 +216,7 @@ def get_default_bucket_policy_statements(self, pipeline_bucket):
Resource=[
troposphere.Join('', [
awacs.s3.ARN(),
-Ref(pipeline_bucket),
+pipeline_bucket,
]),
],
),
@@ -236,7 +244,7 @@ def get_default_bucket_policy_statements(self, pipeline_bucket):
Resource=[
troposphere.Join('', [
awacs.s3.ARN(),
-Ref(pipeline_bucket),
+pipeline_bucket,
'/*'
]),
],
@@ -248,7 +256,7 @@ def get_bucket_policy(self, pipeline_bucket, bucket_policy_statements):
def get_bucket_policy(self, pipeline_bucket, bucket_policy_statements):
policy = troposphere.s3.BucketPolicy(
"PipelineBucketPolicy",
-Bucket=troposphere.Ref(pipeline_bucket),
+Bucket=pipeline_bucket,
PolicyDocument=awacs.aws.Policy(
Statement=bucket_policy_statements,
),
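The policy statements and artifact store keep working with a plain name because awacs.s3.ARN() renders the arn:aws:s3::: prefix; joining it with a bucket name string yields the same ARN the old Ref(pipeline_bucket) version produced, without requiring the bucket to exist in this template. A small sketch with a hypothetical name:

    import awacs.s3
    import troposphere

    bucket_name = "example-pipeline-bucket"  # hypothetical

    # Renders as arn:aws:s3:::example-pipeline-bucket
    bucket_arn = troposphere.Join('', [awacs.s3.ARN(), bucket_name])
    # Renders as arn:aws:s3:::example-pipeline-bucket/* (every object in the bucket)
    objects_arn = troposphere.Join('', [awacs.s3.ARN(), bucket_name, '/*'])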
Empty file.
cumulus/steps/storage/s3bucket.py (45 changes: 45 additions & 0 deletions)
@@ -0,0 +1,45 @@
from troposphere.s3 import Bucket, VersioningConfiguration

from cumulus.chain import step


class S3Bucket(step.Step):
def __init__(self,
logical_name,
bucket_name,
# bucket_policy_statements=None,
):
"""
:type bucket_name: the name of the bucket that will be created suffixed with the chaincontext instance name
:type bucket_policy_statements: [awacs.aws.Statement]
"""
step.Step.__init__(self)
self.logical_name = logical_name
self.bucket_name = bucket_name
# TODO: this property is a vestigial one from when this was ripped out of the pipeline,
# however, leaving it here as it is surely useful if you want to just create a bucket
# with some policies.
# self.bucket_policy_statements = bucket_policy_statements

def handle(self, chain_context):
"""
This step adds a versioned S3 bucket to the template.
:param chain_context:
:return:
"""

bucket = Bucket(
self.logical_name,
BucketName=self.bucket_name,
VersioningConfiguration=VersioningConfiguration(
Status="Enabled"
)
)

chain_context.template.add_resource(bucket)

print("Added bucket: " + self.logical_name)
tests/stacker_test/blueprints/pipeline_simple.py (22 changes: 18 additions & 4 deletions)
@@ -1,4 +1,5 @@
import troposphere
+from cumulus.steps.storage.s3bucket import S3Bucket
from stacker.blueprints.base import Blueprint
import troposphere.codebuild

@@ -17,7 +18,6 @@ class PipelineSimple(Blueprint):
}

def create_template(self):

t = self.template
t.add_description("Acceptance Tests for cumulus pipelines")

@@ -34,10 +34,24 @@ def create_template(self):
"automatedtests"
])

-the_chain.add(pipeline.Pipeline(
+bucket = S3Bucket(
+    logical_name="PipelineBucket",
+    bucket_name=pipeline_bucket_name,
+)
+# expected
+# cumulus-acc-964705782699-automatedtests
+# actual
+# acc-964705782699-automatedtests

+the_chain.add(bucket)

+the_pipeline = pipeline.Pipeline(
name=self.name,
bucket_name=pipeline_bucket_name,
-))
+create_bucket=False,
+)

+the_chain.add(the_pipeline)

source_stage_name = "SourceStage"
deploy_stage_name = "DeployStage"
@@ -52,7 +66,7 @@ def create_template(self):
action_name="MicroserviceSource",
output_artifact_name=service_artifact,
s3_bucket_name=pipeline_bucket_name,
s3_object_key="artifact.tar.gz"
s3_object_key="artifact.zip"
)
)

tests/stacker_test/conf/acceptance.env (5 changes: 5 additions & 0 deletions)
@@ -2,3 +2,8 @@
# http://stacker.readthedocs.io/en/latest/environments.html
namespace: acc
env: ac
+VpcId: vpc-894b89ef
+BaseDomain: playpen.dsl.aws.shaw.ca
+PrivateSubnets: subnet-7b8cba32,subnet-ed041b8a
+SshKeyName: stc-admin-March-2017-PLAYPEN
+ALBCertName: ${ssmstore us-west-2@/simpleweb/bswift/ALBCertName}
tests/stacker_test/run-integration.sh (20 changes: 10 additions & 10 deletions)
@@ -1,30 +1,30 @@
#!/usr/bin/env bash

#set -x
ACCOUNT_ID=`aws sts get-caller-identity | jq .Account | tr -d '"' `
NAMESPACE=acc # must match the namespace in the conf file
BUCKET="cumulus-${NAMESPACE}-${ACCOUNT_ID}-automatedtests"

echo "Using account: ${ACCOUNT_ID}"
echo "Using bucket: ${BUCKET}"

set -e #Important. Script will exit appropriately if there is an error.

-stacker build conf/acceptance.env stacker.yaml --recreate-failed -t
+stacker build conf/acceptance.env stacker.yaml --recreate-failed -t --stacks pipelinesimple

-ARTIFACT_NAME='artifact.tar.gz'
+BUCKET_NAME=$(stacker info conf/acceptance.env stacker.yaml 2>&1 | grep PipelineBucket: | cut -f 3 -d " ")
+PIPELINE_NAME=$(stacker info conf/acceptance.env stacker.yaml 2>&1 | grep PipelineName | cut -f 3 -d " ")

+ARTIFACT_NAME='artifact.zip'
TEMP_DIR='ac_build'

pushd ../../ # move to main folder
mkdir -p ${TEMP_DIR}
-zip -r ${TEMP_DIR}/${ARTIFACT_NAME} ./ -x *.git* *./${TEMP_DIR}* *.eggs* *.idea* *.tox*
+zip -r ${TEMP_DIR}/${ARTIFACT_NAME} ./ -x *.git* *./${TEMP_DIR}* *.eggs* *.egg-* *.idea* *.tox* *tests* *docs*

-aws s3 cp ./${TEMP_DIR}/${ARTIFACT_NAME} s3://${BUCKET}/${ARTIFACT_NAME}
+aws s3 cp ./${TEMP_DIR}/${ARTIFACT_NAME} s3://${BUCKET_NAME}/${ARTIFACT_NAME}

rm -rf ${TEMP_DIR}
popd # return to test folder

-PIPELINE_NAME=$(stacker info conf/acceptance.env stacker.yaml 2>&1 | grep PipelineLogicalName | cut -f 3 -d " ")

echo "Pipeline deployment started for pipeline: ${PIPELINE_NAME}"

# get shasum from expected and actual output. When they match we are at approval state
@@ -60,8 +60,8 @@ done

SHOULD_DESTROY=false
if $SHOULD_DESTROY; then
-aws s3 rm s3://${BUCKET} --recursive
-python delete_bucket_versions.py ${BUCKET}
+aws s3 rm s3://${BUCKET_NAME} --recursive
+python delete_bucket_versions.py ${BUCKET_NAME}

stacker destroy conf/acceptance.env stacker.yaml --force -t
fi
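The delete_bucket_versions.py helper is needed because the bucket is versioned: aws s3 rm --recursive deletes current objects but leaves old versions and delete markers behind, and those block bucket deletion. A sketch of what such a helper might look like with boto3 (the script itself is not shown in this commit):

    import sys

    import boto3

    # Remove every object version and delete marker so the bucket can be deleted.
    bucket = boto3.resource("s3").Bucket(sys.argv[1])
    bucket.object_versions.delete()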
