feat(s3-deployment): exclude and include filters (#16054)
This change only enables integration with the two existing CLI options (`--exclude` and `--include`) that are supported by the `s3 sync` command. There are a few situations where this can prove valuable:

1. Situations where you want to deploy a subset of files from an archive - This can already be handled by leveraging the bundling option for a source, although in some situations the `exclude` filter is significantly easier.
2. Situations where you want to leverage `prune` but have specific files excluded - This is a situation that cannot be solved with the current tools. The most common scenario (and the one I detailed in #14362) is where you manage a web app config file with a custom CloudFormation resource (to pass in the API endpoint, user pool, etc.) and then manage the web application itself using this construct (see the sketch below).

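A rough sketch of that second scenario (identifiers and paths here are illustrative, not taken from this repo): pruning stays enabled for the web app's build output, while the externally managed config file is excluded so the sync never deletes it.

```ts
import * as path from 'path';
import * as s3 from '@aws-cdk/aws-s3';
import * as s3deploy from '@aws-cdk/aws-s3-deployment';

declare const websiteBucket: s3.IBucket;

// `config.json` is written by a custom CloudFormation resource and must
// survive every deployment of the web app's build output.
new s3deploy.BucketDeployment(this, 'DeployWebsite', {
  sources: [s3deploy.Source.asset(path.join(__dirname, 'website-dist'))],
  destinationBucket: websiteBucket,
  prune: true,              // stale build artifacts are still deleted...
  exclude: ['config.json'], // ...but the externally managed config never is
});
```
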
Closes #14362 

----

*By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license*
davidtucker authored Aug 19, 2021
1 parent 3b2c790 commit d42e89e
Showing 10 changed files with 408 additions and 39 deletions.
@@ -108,15 +108,15 @@
"VpcPublicSubnet1NATGateway4D7517AA": {
"Type": "AWS::EC2::NatGateway",
"Properties": {
"SubnetId": {
"Ref": "VpcPublicSubnet1Subnet5C2D37C4"
},
"AllocationId": {
"Fn::GetAtt": [
"VpcPublicSubnet1EIPD7E02669",
"AllocationId"
]
},
"SubnetId": {
"Ref": "VpcPublicSubnet1Subnet5C2D37C4"
},
"Tags": [
{
"Key": "Name",
@@ -205,15 +205,15 @@
"VpcPublicSubnet2NATGateway9182C01D": {
"Type": "AWS::EC2::NatGateway",
"Properties": {
"SubnetId": {
"Ref": "VpcPublicSubnet2Subnet691E08A3"
},
"AllocationId": {
"Fn::GetAtt": [
"VpcPublicSubnet2EIP3C605A87",
"AllocationId"
]
},
"SubnetId": {
"Ref": "VpcPublicSubnet2Subnet691E08A3"
},
"Tags": [
{
"Key": "Name",
@@ -1219,7 +1219,7 @@
"Properties": {
"Code": {
"S3Bucket": {
"Ref": "AssetParametersc24b999656e4fe6c609c31bae56a1cf4717a405619c3aa6ba1bc686b8c2c86cfS3Bucket55EFA30C"
"Ref": "AssetParameters1f6de40da10b415b255c07df709f791e772ffb9f7bdd14ad81fb75643aad24eaS3BucketD8D20B9A"
},
"S3Key": {
"Fn::Join": [
@@ -1232,7 +1232,7 @@
"Fn::Split": [
"||",
{
"Ref": "AssetParametersc24b999656e4fe6c609c31bae56a1cf4717a405619c3aa6ba1bc686b8c2c86cfS3VersionKey60329B70"
"Ref": "AssetParameters1f6de40da10b415b255c07df709f791e772ffb9f7bdd14ad81fb75643aad24eaS3VersionKeyCD2774D3"
}
]
}
@@ -1245,7 +1245,7 @@
"Fn::Split": [
"||",
{
"Ref": "AssetParametersc24b999656e4fe6c609c31bae56a1cf4717a405619c3aa6ba1bc686b8c2c86cfS3VersionKey60329B70"
"Ref": "AssetParameters1f6de40da10b415b255c07df709f791e772ffb9f7bdd14ad81fb75643aad24eaS3VersionKeyCD2774D3"
}
]
}
@@ -1348,17 +1348,17 @@
"Type": "String",
"Description": "Artifact hash for asset \"e9882ab123687399f934da0d45effe675ecc8ce13b40cb946f3e1d6141fe8d68\""
},
"AssetParametersc24b999656e4fe6c609c31bae56a1cf4717a405619c3aa6ba1bc686b8c2c86cfS3Bucket55EFA30C": {
"AssetParameters1f6de40da10b415b255c07df709f791e772ffb9f7bdd14ad81fb75643aad24eaS3BucketD8D20B9A": {
"Type": "String",
"Description": "S3 bucket for asset \"c24b999656e4fe6c609c31bae56a1cf4717a405619c3aa6ba1bc686b8c2c86cf\""
"Description": "S3 bucket for asset \"1f6de40da10b415b255c07df709f791e772ffb9f7bdd14ad81fb75643aad24ea\""
},
"AssetParametersc24b999656e4fe6c609c31bae56a1cf4717a405619c3aa6ba1bc686b8c2c86cfS3VersionKey60329B70": {
"AssetParameters1f6de40da10b415b255c07df709f791e772ffb9f7bdd14ad81fb75643aad24eaS3VersionKeyCD2774D3": {
"Type": "String",
"Description": "S3 key for asset version \"c24b999656e4fe6c609c31bae56a1cf4717a405619c3aa6ba1bc686b8c2c86cf\""
"Description": "S3 key for asset version \"1f6de40da10b415b255c07df709f791e772ffb9f7bdd14ad81fb75643aad24ea\""
},
"AssetParametersc24b999656e4fe6c609c31bae56a1cf4717a405619c3aa6ba1bc686b8c2c86cfArtifactHash85F58E48": {
"AssetParameters1f6de40da10b415b255c07df709f791e772ffb9f7bdd14ad81fb75643aad24eaArtifactHash3943F7F3": {
"Type": "String",
"Description": "Artifact hash for asset \"c24b999656e4fe6c609c31bae56a1cf4717a405619c3aa6ba1bc686b8c2c86cf\""
"Description": "Artifact hash for asset \"1f6de40da10b415b255c07df709f791e772ffb9f7bdd14ad81fb75643aad24ea\""
},
"AssetParameters972240f9dd6e036a93d5f081af9a24315b2053828ac049b3b19b2fa12d7ae64aS3Bucket1F1A8472": {
"Type": "String",
@@ -1385,4 +1385,4 @@
"Description": "Artifact hash for asset \"872561bf078edd1685d50c9ff821cdd60d2b2ddfb0013c4087e79bf2bb50724d\""
}
}
}
}
23 changes: 23 additions & 0 deletions packages/@aws-cdk/aws-s3-deployment/README.md
@@ -138,6 +138,29 @@ new BucketDeployment(this, 'HTMLBucketDeployment', {
});
```

## Exclude and Include Filters

There are two points at which filters are evaluated in a deployment: asset bundling and the actual deployment. If you simply want to exclude files in the asset bundling process, you should leverage the `exclude` property of `AssetOptions` when defining your source:

```ts
new BucketDeployment(this, 'HTMLBucketDeployment', {
sources: [Source.asset('./website', { exclude: ['*', '!index.html'] })],
destinationBucket: bucket,
});
```

If you want to specify filters to be used in the deployment process itself, you can use the `exclude` and `include` properties on `BucketDeployment`. Excluded files will not be deployed to the destination bucket. In addition, if an excluded file already exists in the destination bucket, it will not be deleted even when the `prune` option is enabled:

```ts
new s3deploy.BucketDeployment(this, 'DeployButExcludeSpecificFiles', {
sources: [s3deploy.Source.asset(path.join(__dirname, 'my-website'))],
destinationBucket,
exclude: ['*.txt']
});
```

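As a sketch of the `prune` interplay described above (assuming the same setup as the previous example, with a `config.json` that is managed outside of this deployment): pruning remains enabled, but the excluded file is never deleted from the destination bucket.

```ts
new s3deploy.BucketDeployment(this, 'DeployButKeepConfig', {
  sources: [s3deploy.Source.asset(path.join(__dirname, 'my-website'))],
  destinationBucket,
  prune: true, // the default; files missing from the source are still removed...
  exclude: ['config.json'], // ...except for the externally managed config file
});
```
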
These filters follow the same format that is used for the AWS CLI. See the CLI documentation for information on [Using Include and Exclude Filters](https://docs.aws.amazon.com/cli/latest/reference/s3/index.html#use-of-exclude-and-include-filters).

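Because the construct passes all `--exclude` filters to the sync command before any `--include` filters, a later `--include` takes precedence over an earlier `--exclude` (per the CLI's filter rules). As a sketch (same setup as above), the following would deploy only the HTML files from the source:

```ts
new s3deploy.BucketDeployment(this, 'DeployOnlyHtml', {
  sources: [s3deploy.Source.asset(path.join(__dirname, 'my-website'))],
  destinationBucket,
  exclude: ['*'],      // start by excluding everything...
  include: ['*.html'], // ...then re-include only the HTML files
});
```
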
## Objects metadata

You can specify metadata to be set on all the objects in your deployment.
25 changes: 25 additions & 0 deletions packages/@aws-cdk/aws-s3-deployment/lib/bucket-deployment.ts
@@ -35,6 +35,29 @@ export interface BucketDeploymentProps {
*/
readonly destinationKeyPrefix?: string;

/**
* If this is set, matching files or objects will be excluded from the deployment's sync
* command. This can be used to exclude a file from being pruned in the destination bucket.
*
* If you want to just exclude files from the deployment package (which also excludes these files
* from being evaluated when invalidating the asset), you should leverage the `exclude` property of
* `AssetOptions` when defining your source.
*
* @default - No exclude filters are used
* @see https://docs.aws.amazon.com/cli/latest/reference/s3/index.html#use-of-exclude-and-include-filters
*/
readonly exclude?: string[];

/**
* If this is set, matching files or objects will be included with the deployment's sync
* command. Since all files from the deployment package are included by default, this property
* is usually leveraged alongside an `exclude` filter.
*
* @default - No include filters are used and all files are included with the sync command
* @see https://docs.aws.amazon.com/cli/latest/reference/s3/index.html#use-of-exclude-and-include-filters
*/
readonly include?: string[];

/**
* If this is set to false, files in the destination bucket that
* do not exist in the asset, will NOT be deleted during deployment (create/update).
@@ -245,6 +268,8 @@ export class BucketDeployment extends CoreConstruct {
DestinationBucketKeyPrefix: props.destinationKeyPrefix,
RetainOnDelete: props.retainOnDelete,
Prune: props.prune ?? true,
Exclude: props.exclude,
Include: props.include,
UserMetadata: props.metadata ? mapUserMetadata(props.metadata) : undefined,
SystemMetadata: mapSystemMetadata(props),
DistributionId: props.distribution?.distributionId,
14 changes: 12 additions & 2 deletions packages/@aws-cdk/aws-s3-deployment/lib/lambda/index.py
@@ -49,6 +49,8 @@ def cfn_error(message=None):
user_metadata = props.get('UserMetadata', {})
system_metadata = props.get('SystemMetadata', {})
prune = props.get('Prune', 'true').lower() == 'true'
exclude = props.get('Exclude', [])
include = props.get('Include', [])

default_distribution_path = dest_bucket_prefix
if not default_distribution_path.endswith("/"):
@@ -100,7 +102,7 @@ def cfn_error(message=None):
aws_command("s3", "rm", old_s3_dest, "--recursive")

if request_type == "Update" or request_type == "Create":
s3_deploy(s3_source_zips, s3_dest, user_metadata, system_metadata, prune)
s3_deploy(s3_source_zips, s3_dest, user_metadata, system_metadata, prune, exclude, include)

if distribution_id:
cloudfront_invalidate(distribution_id, distribution_paths)
@@ -114,7 +116,7 @@

#---------------------------------------------------------------------------------------------------
# populate all files from s3_source_zips to a destination bucket
def s3_deploy(s3_source_zips, s3_dest, user_metadata, system_metadata, prune):
def s3_deploy(s3_source_zips, s3_dest, user_metadata, system_metadata, prune, exclude, include):
# create a temporary working directory
workdir=tempfile.mkdtemp()
logger.info("| workdir: %s" % workdir)
@@ -139,6 +141,14 @@ def s3_deploy(s3_source_zips, s3_dest, user_metadata, system_metadata, prune):
if prune:
s3_command.append("--delete")

if exclude:
for filter in exclude:
s3_command.extend(["--exclude", filter])

if include:
for filter in include:
s3_command.extend(["--include", filter])

s3_command.extend([contents_dir, s3_dest])
s3_command.extend(create_metadata_args(user_metadata, system_metadata))
aws_command(*s3_command)
76 changes: 76 additions & 0 deletions packages/@aws-cdk/aws-s3-deployment/test/bucket-deployment.test.ts
@@ -689,6 +689,82 @@ test('deploy without deleting missing files from destination', () => {
});
});

test('deploy with excluded files from destination', () => {

// GIVEN
const stack = new cdk.Stack();
const bucket = new s3.Bucket(stack, 'Dest');

// WHEN
new s3deploy.BucketDeployment(stack, 'Deploy', {
sources: [s3deploy.Source.asset(path.join(__dirname, 'my-website'))],
destinationBucket: bucket,
exclude: ['sample.js'],
});

expect(stack).toHaveResourceLike('Custom::CDKBucketDeployment', {
Exclude: ['sample.js'],
});
});

test('deploy with included files from destination', () => {

// GIVEN
const stack = new cdk.Stack();
const bucket = new s3.Bucket(stack, 'Dest');

// WHEN
new s3deploy.BucketDeployment(stack, 'Deploy', {
sources: [s3deploy.Source.asset(path.join(__dirname, 'my-website'))],
destinationBucket: bucket,
include: ['sample.js'],
});

expect(stack).toHaveResourceLike('Custom::CDKBucketDeployment', {
Include: ['sample.js'],
});
});

test('deploy with excluded and included files from destination', () => {

// GIVEN
const stack = new cdk.Stack();
const bucket = new s3.Bucket(stack, 'Dest');

// WHEN
new s3deploy.BucketDeployment(stack, 'Deploy', {
sources: [s3deploy.Source.asset(path.join(__dirname, 'my-website'))],
destinationBucket: bucket,
exclude: ['sample/*'],
include: ['sample/include.json'],
});

expect(stack).toHaveResourceLike('Custom::CDKBucketDeployment', {
Exclude: ['sample/*'],
Include: ['sample/include.json'],
});
});

test('deploy with multiple exclude and include filters', () => {

// GIVEN
const stack = new cdk.Stack();
const bucket = new s3.Bucket(stack, 'Dest');

// WHEN
new s3deploy.BucketDeployment(stack, 'Deploy', {
sources: [s3deploy.Source.asset(path.join(__dirname, 'my-website'))],
destinationBucket: bucket,
exclude: ['sample/*', 'another/*'],
include: ['sample/include.json', 'another/include.json'],
});

expect(stack).toHaveResourceLike('Custom::CDKBucketDeployment', {
Exclude: ['sample/*', 'another/*'],
Include: ['sample/include.json', 'another/include.json'],
});
});

test('deployment allows vpc to be implicitly supplied to lambda', () => {

// GIVEN
@@ -295,7 +295,7 @@
"Properties": {
"Code": {
"S3Bucket": {
"Ref": "AssetParametersc24b999656e4fe6c609c31bae56a1cf4717a405619c3aa6ba1bc686b8c2c86cfS3Bucket55EFA30C"
"Ref": "AssetParameters1f6de40da10b415b255c07df709f791e772ffb9f7bdd14ad81fb75643aad24eaS3BucketD8D20B9A"
},
"S3Key": {
"Fn::Join": [
@@ -308,7 +308,7 @@
"Fn::Split": [
"||",
{
"Ref": "AssetParametersc24b999656e4fe6c609c31bae56a1cf4717a405619c3aa6ba1bc686b8c2c86cfS3VersionKey60329B70"
"Ref": "AssetParameters1f6de40da10b415b255c07df709f791e772ffb9f7bdd14ad81fb75643aad24eaS3VersionKeyCD2774D3"
}
]
}
@@ -321,7 +321,7 @@
"Fn::Split": [
"||",
{
"Ref": "AssetParametersc24b999656e4fe6c609c31bae56a1cf4717a405619c3aa6ba1bc686b8c2c86cfS3VersionKey60329B70"
"Ref": "AssetParameters1f6de40da10b415b255c07df709f791e772ffb9f7bdd14ad81fb75643aad24eaS3VersionKeyCD2774D3"
}
]
}
@@ -365,17 +365,17 @@
"Type": "String",
"Description": "Artifact hash for asset \"e9882ab123687399f934da0d45effe675ecc8ce13b40cb946f3e1d6141fe8d68\""
},
"AssetParametersc24b999656e4fe6c609c31bae56a1cf4717a405619c3aa6ba1bc686b8c2c86cfS3Bucket55EFA30C": {
"AssetParameters1f6de40da10b415b255c07df709f791e772ffb9f7bdd14ad81fb75643aad24eaS3BucketD8D20B9A": {
"Type": "String",
"Description": "S3 bucket for asset \"c24b999656e4fe6c609c31bae56a1cf4717a405619c3aa6ba1bc686b8c2c86cf\""
"Description": "S3 bucket for asset \"1f6de40da10b415b255c07df709f791e772ffb9f7bdd14ad81fb75643aad24ea\""
},
"AssetParametersc24b999656e4fe6c609c31bae56a1cf4717a405619c3aa6ba1bc686b8c2c86cfS3VersionKey60329B70": {
"AssetParameters1f6de40da10b415b255c07df709f791e772ffb9f7bdd14ad81fb75643aad24eaS3VersionKeyCD2774D3": {
"Type": "String",
"Description": "S3 key for asset version \"c24b999656e4fe6c609c31bae56a1cf4717a405619c3aa6ba1bc686b8c2c86cf\""
"Description": "S3 key for asset version \"1f6de40da10b415b255c07df709f791e772ffb9f7bdd14ad81fb75643aad24ea\""
},
"AssetParametersc24b999656e4fe6c609c31bae56a1cf4717a405619c3aa6ba1bc686b8c2c86cfArtifactHash85F58E48": {
"AssetParameters1f6de40da10b415b255c07df709f791e772ffb9f7bdd14ad81fb75643aad24eaArtifactHash3943F7F3": {
"Type": "String",
"Description": "Artifact hash for asset \"c24b999656e4fe6c609c31bae56a1cf4717a405619c3aa6ba1bc686b8c2c86cf\""
"Description": "Artifact hash for asset \"1f6de40da10b415b255c07df709f791e772ffb9f7bdd14ad81fb75643aad24ea\""
},
"AssetParametersfc4481abf279255619ff7418faa5d24456fef3432ea0da59c95542578ff0222eS3Bucket9CD8B20A": {
"Type": "String",
@@ -390,4 +390,4 @@
"Description": "Artifact hash for asset \"fc4481abf279255619ff7418faa5d24456fef3432ea0da59c95542578ff0222e\""
}
}
}
}
