From e60d528f89b5bc7ca8168cfe2491ec5a12f8d68a Mon Sep 17 00:00:00 2001 From: Rico Huijbers Date: Tue, 30 Jun 2020 09:58:32 +0200 Subject: [PATCH 1/9] feat: Developer Preview of CDK Pipelines Adds an initial, Developer Preview version of CDK Pipelines, a higher-level construct library to make it easy to set up CI/CD pipelines for CDK apps. Resolves aws/aws-cdk-rfcs#49. --- packages/@aws-cdk/pipelines/.eslintrc.js | 3 + packages/@aws-cdk/pipelines/.gitignore | 16 + packages/@aws-cdk/pipelines/.npmignore | 21 + packages/@aws-cdk/pipelines/LICENSE | 201 +++ packages/@aws-cdk/pipelines/NOTICE | 2 + packages/@aws-cdk/pipelines/README.md | 542 +++++++ .../lib/actions/deploy-cdk-stack-action.ts | 360 +++++ .../@aws-cdk/pipelines/lib/actions/index.ts | 3 + .../lib/actions/publish-assets-action.ts | 153 ++ .../lib/actions/update-pipeline-action.ts | 127 ++ packages/@aws-cdk/pipelines/lib/index.ts | 5 + packages/@aws-cdk/pipelines/lib/pipeline.ts | 313 ++++ .../pipelines/lib/private/asset-manifest.ts | 296 ++++ .../lib/private/construct-internals.ts | 37 + .../pipelines/lib/private/toposort.ts | 47 + packages/@aws-cdk/pipelines/lib/stage.ts | 386 +++++ .../@aws-cdk/pipelines/lib/synths/_util.ts | 15 + .../@aws-cdk/pipelines/lib/synths/index.ts | 1 + .../lib/synths/simple-synth-action.ts | 353 +++++ .../pipelines/lib/validation/_files.ts | 97 ++ .../pipelines/lib/validation/index.ts | 1 + .../lib/validation/shell-script-action.ts | 183 +++ packages/@aws-cdk/pipelines/package.json | 119 ++ .../@aws-cdk/pipelines/test/builds.test.ts | 142 ++ .../test/cross-environment-infra.test.ts | 76 + .../test/integ.pipeline.expected.json | 1316 +++++++++++++++++ .../@aws-cdk/pipelines/test/integ.pipeline.ts | 80 + .../pipelines/test/pipeline-assets.test.ts | 216 +++ .../@aws-cdk/pipelines/test/pipeline.test.ts | 286 ++++ .../pipelines/test/stack-ordering.test.ts | 83 ++ .../test/test-docker-asset/Dockerfile | 2 + .../pipelines/test/test-file-asset-two.txt | 1 + .../pipelines/test/test-file-asset.txt | 1 + .../@aws-cdk/pipelines/test/testmatchers.ts | 121 ++ packages/@aws-cdk/pipelines/test/testutil.ts | 106 ++ .../pipelines/test/validation.test.ts | 178 +++ yarn.lock | 129 +- 37 files changed, 6006 insertions(+), 12 deletions(-) create mode 100644 packages/@aws-cdk/pipelines/.eslintrc.js create mode 100644 packages/@aws-cdk/pipelines/.gitignore create mode 100644 packages/@aws-cdk/pipelines/.npmignore create mode 100644 packages/@aws-cdk/pipelines/LICENSE create mode 100644 packages/@aws-cdk/pipelines/NOTICE create mode 100644 packages/@aws-cdk/pipelines/README.md create mode 100644 packages/@aws-cdk/pipelines/lib/actions/deploy-cdk-stack-action.ts create mode 100644 packages/@aws-cdk/pipelines/lib/actions/index.ts create mode 100644 packages/@aws-cdk/pipelines/lib/actions/publish-assets-action.ts create mode 100644 packages/@aws-cdk/pipelines/lib/actions/update-pipeline-action.ts create mode 100644 packages/@aws-cdk/pipelines/lib/index.ts create mode 100644 packages/@aws-cdk/pipelines/lib/pipeline.ts create mode 100644 packages/@aws-cdk/pipelines/lib/private/asset-manifest.ts create mode 100644 packages/@aws-cdk/pipelines/lib/private/construct-internals.ts create mode 100644 packages/@aws-cdk/pipelines/lib/private/toposort.ts create mode 100644 packages/@aws-cdk/pipelines/lib/stage.ts create mode 100644 packages/@aws-cdk/pipelines/lib/synths/_util.ts create mode 100644 packages/@aws-cdk/pipelines/lib/synths/index.ts create mode 100644 packages/@aws-cdk/pipelines/lib/synths/simple-synth-action.ts create mode 100644 
packages/@aws-cdk/pipelines/lib/validation/_files.ts create mode 100644 packages/@aws-cdk/pipelines/lib/validation/index.ts create mode 100644 packages/@aws-cdk/pipelines/lib/validation/shell-script-action.ts create mode 100644 packages/@aws-cdk/pipelines/package.json create mode 100644 packages/@aws-cdk/pipelines/test/builds.test.ts create mode 100644 packages/@aws-cdk/pipelines/test/cross-environment-infra.test.ts create mode 100644 packages/@aws-cdk/pipelines/test/integ.pipeline.expected.json create mode 100644 packages/@aws-cdk/pipelines/test/integ.pipeline.ts create mode 100644 packages/@aws-cdk/pipelines/test/pipeline-assets.test.ts create mode 100644 packages/@aws-cdk/pipelines/test/pipeline.test.ts create mode 100644 packages/@aws-cdk/pipelines/test/stack-ordering.test.ts create mode 100644 packages/@aws-cdk/pipelines/test/test-docker-asset/Dockerfile create mode 100644 packages/@aws-cdk/pipelines/test/test-file-asset-two.txt create mode 100644 packages/@aws-cdk/pipelines/test/test-file-asset.txt create mode 100644 packages/@aws-cdk/pipelines/test/testmatchers.ts create mode 100644 packages/@aws-cdk/pipelines/test/testutil.ts create mode 100644 packages/@aws-cdk/pipelines/test/validation.test.ts diff --git a/packages/@aws-cdk/pipelines/.eslintrc.js b/packages/@aws-cdk/pipelines/.eslintrc.js new file mode 100644 index 0000000000000..61dd8dd001f63 --- /dev/null +++ b/packages/@aws-cdk/pipelines/.eslintrc.js @@ -0,0 +1,3 @@ +const baseConfig = require('cdk-build-tools/config/eslintrc'); +baseConfig.parserOptions.project = __dirname + '/tsconfig.json'; +module.exports = baseConfig; diff --git a/packages/@aws-cdk/pipelines/.gitignore b/packages/@aws-cdk/pipelines/.gitignore new file mode 100644 index 0000000000000..32a10d785e8fb --- /dev/null +++ b/packages/@aws-cdk/pipelines/.gitignore @@ -0,0 +1,16 @@ +*.js +tsconfig.json +*.js.map +*.d.ts +*.generated.ts +dist +lib/generated/resources.ts +.jsii + +.LAST_BUILD +.nyc_output +coverage +nyc.config.js +.LAST_PACKAGE +*.snk +!.eslintrc.js diff --git a/packages/@aws-cdk/pipelines/.npmignore b/packages/@aws-cdk/pipelines/.npmignore new file mode 100644 index 0000000000000..174864d493a79 --- /dev/null +++ b/packages/@aws-cdk/pipelines/.npmignore @@ -0,0 +1,21 @@ +# Don't include original .ts files when doing `npm pack` +*.ts +!*.d.ts +coverage +.nyc_output +*.tgz + +dist +.LAST_PACKAGE +.LAST_BUILD +!*.js + +# Include .jsii +!.jsii + +*.snk + +*.tsbuildinfo + +tsconfig.json +.eslintrc.js diff --git a/packages/@aws-cdk/pipelines/LICENSE b/packages/@aws-cdk/pipelines/LICENSE new file mode 100644 index 0000000000000..b71ec1688783a --- /dev/null +++ b/packages/@aws-cdk/pipelines/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/packages/@aws-cdk/pipelines/NOTICE b/packages/@aws-cdk/pipelines/NOTICE new file mode 100644 index 0000000000000..bfccac9a7f69c --- /dev/null +++ b/packages/@aws-cdk/pipelines/NOTICE @@ -0,0 +1,2 @@ +AWS Cloud Development Kit (AWS CDK) +Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. diff --git a/packages/@aws-cdk/pipelines/README.md b/packages/@aws-cdk/pipelines/README.md new file mode 100644 index 0000000000000..8bb40c1b863c5 --- /dev/null +++ b/packages/@aws-cdk/pipelines/README.md @@ -0,0 +1,542 @@ +# CDK Pipelines + +--- + +![cdk-constructs: Developer Preview](https://img.shields.io/badge/cdk--constructs-developer--preview-informational.svg?style=for-the-badge) + +> The APIs of higher level constructs in this module are in **developer preview** before they become stable. We will only make breaking changes to address unforeseen API issues. Therefore, these APIs are not subject to [Semantic Versioning](https://semver.org/), and breaking changes will be announced in release notes. This means that while you may use them, you may need to update your source code when upgrading to a newer version of this package. + +--- + + + +A construct library for painless Continuous Delivery of CDK applications. + +![Developer Preview](https://img.shields.io/badge/developer--preview-informational.svg?style=for-the-badge) + +> This module is in **developer preview**. We may make breaking changes to address unforeseen API issues. Therefore, these APIs are not subject to [Semantic Versioning](https://semver.org/), and breaking changes will be announced in release notes. This means that while you may use them, you may need to update your source code when upgrading to a newer version of this package. + +## At a glance + +Defining a pipeline for your application is as simple as defining a subclass +of `Stage`, and calling `pipeline.addApplicationStage()` with instances of +that class. Deploying to a different account or region looks exactly the +same, the *CDK Pipelines* library takes care the differences. + +```ts +import { Construct, Stage } from '@aws-cdk/core'; + +/** + * Your application + * + * May consist of one or more Stacks + */ +class MyApplication extends Stage { + constructor(scope: Construct, id: string, props?: StageProps) { + super(scope, id, props); + + const dbStack = new DatabaseStack(this, 'Database'); + new ComputeStack(this, 'Compute', { + table: dbStack.table, + }); + } +} + +/** + * Stack to hold the pipeline + */ +class MyPipelineStack extends Stack { + constructor(scope: Construct, id: string, props?: StackProps) { + super(scope, id, props); + + const sourceArtifact = new codepipeline.Artifact(); + const cloudAssemblyArtifact = new codepipeline.Artifact(); + + const pipeline = new CdkPipeline(this, 'Pipeline', { + // ...source and build information here (see below) + }); + + // Do this as many times as necessary with any account and region + // Account and region may different from the pipeline's. + pipeline.addApplicationStage(new MyApplication(this, 'Prod', { + env: { + account: '123456789012', + region: 'eu-west-1', + } + })); + } +} +``` + +The pipeline is **self-mutating**, which means that if you add new +application stages in the source code, or new stacks to `MyApplication`, the +pipeline will automatically reconfigure itself to deploy those new stages and +stacks. + +## CDK Versioning + +This library requires exactly CDK version `1.45.0`. The rest of your application must +use the same version. 
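+
+For example, if the pipeline and the application live in the same NPM package, the
+`package.json` dependencies could pin every CDK package to that exact version (the
+module list below is only an illustration; use whichever CDK modules your app
+actually depends on):
+
+```
+{
+  "dependencies": {
+    "@aws-cdk/core": "1.45.0",
+    "@aws-cdk/pipelines": "1.45.0",
+    "@aws-cdk/aws-codepipeline": "1.45.0",
+    "@aws-cdk/aws-codepipeline-actions": "1.45.0"
+  },
+  "devDependencies": {
+    "aws-cdk": "1.45.0"
+  }
+}
+```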
+ +It uses prerelease features of the CDK framework, which can be enabled by adding the +following to `cdk.json`: + +``` +{ + ... + "context": { + "@aws-cdk/core:newStyleStackSynthesis": true + } +} +``` + +When bootstrapping, the environment variable `CDK_NEW_BOOTSTRAP=1` should be +set (see the section called **CDK Bootstrapping**). + +## Defining the Pipeline (Source and Synth) + +The pipeline is defined by instantiating `CdkPipeline` in a Stack. This defines the +source location for the pipeline as well as the build commands. For example, the following +defines a pipeline whose source is stored in a GitHub repository, and uses NPM +to build. The Pipeline will be provisioned in account `111111111111` and region +`eu-west-1`: + +```ts +class MyPipelineStack extends Stack { + constructor(scope: Construct, id: string, props?: StackProps) { + super(scope, id, props); + + const sourceArtifact = new codepipeline.Artifact(); + const cloudAssemblyArtifact = new codepipeline.Artifact(); + + const pipeline = new CdkPipeline(this, 'Pipeline', { + pipelineName: 'MyAppPipeline', + cloudAssemblyArtifact, + + sourceAction: new codepipeline_actions.GitHubSourceAction({ + actionName: 'GitHub', + output: sourceArtifact, + oauthToken: SecretValue.secretsManager('GITHUB_TOKEN_NAME'), + trigger: codepipeline_actions.GitHubTrigger.POLL, + // Replace these with your actual GitHub project name + owner: 'OWNER', + repo: 'REPO', + }), + + synthAction: SimpleSynthAction.standardNpmSynth({ + sourceArtifact, + cloudAssemblyArtifact, + projectName: 'MyAppPipeline-synth', + + // Use this if you need a build step (if you're not using ts-node + // or if you have TypeScript Lambdas that need to be compiled). + buildCommand: 'npm run build', + }), + }); + } +} + +const app = new App(); +new MyPipelineStack(this, 'PipelineStack', { + env: { + account: '111111111111', + region: 'eu-west-1', + } +}); +``` + +You provision this pipeline by making sure the target environment has been +bootstrapped (see below), and then executing `cdk deploy PipelineStack` +*once*. Afterwards, the pipeline will keep itself up-to-date. + +> **Important**: be sure to `git commit` and `git push` before deploying the +> Pipeline stack using `cdk deploy`! +> +> The reason is that the pipeline will start deploying and self-mutating +> right away based on the sources in the repository, so the sources it finds +> in there should be the ones you want it to find. + +### Sources + +Any of the regular sources from the `@aws-cdk/aws-codepipeline-actions` module can be used. + +#### GitHub + +If you want to use a GitHub repository as the source, you must also create: + +* A [GitHub Access Token](https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line) +* A [Secrets Manager PlainText Secret](https://docs.aws.amazon.com/secretsmanager/latest/userguide/manage_create-basic-secret.html) + with the value of the **GitHub Access Token**. Pick whatever name you want + (for example `github-token`) and pass it as the argument of `oauthToken`. + +### Synths + +You define how to build and synth the project by specifying a `synthAction`. This can be +any CodePipeline action that produces an artifact with a CDK Cloud Assembly in it. Pass +the output artifact of the synth in the Pipeline's `cloudAssemblyArtifact` property. + +`SimpleSynthAction` is available for synths that can be performed by running a couple +of simple shell commands (install, build, and synth). 
Available as factory functions +on `SimpleSynthAction` are some common convention-based synths: + +* `SimpleSynthAction.standardNpmSynth()`: build using NPM conventions. Expects a `package-lock.json`, + a `cdk.json`, and expects the CLI to be a versioned dependency in `package.json`. Does + not perform a build step by default. The source repository does not need to + have a `buildspec.yml`. +* `SimpleSynthAction.standardYarnSynth()`: build using Yarn conventions. Expects a `yarn.lock`, + a `cdk.json`, and expects the CLI to be a versioned dependency in `package.json`. Does + not perform a build step by default. The source repository does not need to + have a `buildspec.yml`. + +If you need a custom build/synth step that is not covered by `SimpleSynthAction`, you can +always add a custom CodeBuild project and pass a corresponding `CodeBuildAction` to the +pipeline. + +## Adding Application Stages + +To define an application that can be added to the pipeline as a whole, define a subclass +of `Stage`. The `Stage` can contain one or more stacks that make up your application. If +there are dependencies between the stacks, the stacks will automatically be added to the +pipeline in the right order. Stacks that don't depend on each other will be deployed in +parallel. You can add a dependency relationship between stacks by calling +`stack1.addDependency(stack2)`. + +Stages take a default `env` argument which the Stacks inside the Stage will fall back to +if no `env` is defined for them. + +An application is added to the pipeline by calling `addApplicationStage()` with instances +of the Stage. The same class can be instantiated and added to the pipeline multiple times +to define different stages of your DTAP or multi-region application pipeline: + +```ts +// Testing stage +pipeline.addApplicationStage(new MyApplication(this, 'Testing', { + env: { account: '111111111111', region: 'eu-west-1' } +})); + +// Acceptance stage +pipeline.addApplicationStage(new MyApplication(this, 'Acceptance', { + env: { account: '222222222222', region: 'eu-west-1' } +})); + +// Production stage +pipeline.addApplicationStage(new MyApplication(this, 'Production', { + env: { account: '333333333333', region: 'eu-west-1' } +})); +``` + +### More Control + +Every *Application Stage* added by `addApplicationStage()` will lead to the addition of +an individual *Pipeline Stage*, which is subsequently returned. You can add more +actions to the stage by calling `addCustomAction()` on it. For example: + +```ts +const testingStage = pipeline.addApplicationStage(new MyApplication(this, 'Testing', { + env: { account: '111111111111', region: 'eu-west-1' } +})); + +// Add a custom action -- in this case, a Manual Approval action +// (for illustration purposes: testingStage.addManualApprovalAction() is a +// convenience shorthand that does the same) +testingStage.addCustomAction(new ManualApprovalAction({ + actionName: 'ManualApproval', + runOrder: testingStage.nextSequentialRunOrder(), +})); +``` + +You can also add more than one *Application Stage* to one *Pipeline Stage*. 
For example: + +```ts +// Create an empty pipeline stage +const testingStage = pipeline.addStage('Testing'); + +// Add two application stages to the same pipeline stage +testingStage.addApplication(new MyApplication1(this, 'MyApp1', { + env: { account: '111111111111', region: 'eu-west-1' } +})); +testingStage.addApplication(new MyApplication2(this, 'MyApp2', { + env: { account: '111111111111', region: 'eu-west-1' } +})); +``` + +## Adding validations to the pipeline + +You can add any type of CodePipeline Action to the pipeline in order to validate +the deployments you are performing. + +The CDK Pipelines construct library comes with a `ShellScriptAction` which uses AWS CodeBuild +to run a set of shell commands (potentially running a test set that comes with your application, +using stack outputs of the deployed stacks). + +In its simplest form, adding validation actions looks like this: + +```ts +const stage = pipeline.addApplicationStage(new MyApplication(/* ... */)); + +stage.addActions(new ShellScriptAction({ + name: 'MyValidation', + commands: ['curl -Ssf https://my.webservice.com/'], + // ... more configuration ... +})); +``` + +### Using CloudFormation Stack Outputs in ShellScriptAction + +Because many CloudFormation deployments result in the generation of resources with unpredictable +names, validations have support for reading back CloudFormation Outputs after a deployment. This +makes it possible to pass (for example) the generated URL of a load balancer to the test set. + +To use Stack Outputs, expose the `CfnOutput` object you're interested in, and +call `pipeline.stackOutput()` on it: + +```ts +class MyLbApplication extends Stage { + public readonly loadBalancerAddress: CfnOutput; + + constructor(scope: Construct, id: string, props?: StageProps) { + super(scope, id, props); + + const lbStack = new LoadBalancerStack(this, 'Stack'); + + // Or create this in `LoadBalancerStack` directly + this.loadBalancerAddress = new CfnOutput(lbStack, 'LbAddress', { + value: `https://${lbStack.loadBalancer.loadBalancerDnsName}/` + }); + } +} + +const lbApp = new MyLbApplication(this, 'MyApp', { + env: { /* ... */ } +}); +const stage = pipeline.addApplicationStage(lbApp); +stage.addActions(new ShellScriptAction({ + // ... + useOutputs: { + // When the test is executed, this will make $URL contain the + // load balancer address. + URL: pipeline.stackOutput(lbApp.loadBalancerAddress), + } +})); +``` + +### Using additional files in Shell Script Actions + +As part of a validation, you probably want to run a test suite that's more +elaborate than what can be expressed in a couple of lines of shell script. +You can bring additional files into the shell script validation by supplying +the `additionalArtifacts` property. + +Here are some typical examples of how you might want to bring in additional +files from several sources: + +* Directory from the source repository +* Additional compiled artifacts from the synth step + +#### Additional files from the source repository + +Bringing in additional files from the source repository is appropriate if the +files in the source repository are directly usable in the test (for example, +if they are executable shell scripts themselves). Pass the `sourceArtifact`: + +```ts +const sourceArtifact = new codepipeline.Artifact(); + +const pipeline = new CdkPipeline(this, 'Pipeline', { + // ... 
+}); + +const validationAction = new ShellScriptAction({ + name: 'TestUsingSourceArtifact', + additionalArtifacts: [sourceArtifact], + + // 'test.sh' comes from the source repository + commands: ['./test.sh'], +}); +``` + +#### Additional files from the synth step + +Getting the additional files from the synth step is appropriate if your +tests need the compilation step that is done as part of synthesis. + +On the synthesis step, specify `additionalArtifacts` to package +additional subdirectories into artifacts, and use the same artifact +in the `ShellScriptAction`'s `additionalArtifacts`: + +```ts +// If you are using additional output artifacts from the synth step, +// they must be named. +const cloudAssemblyArtifact = new codepipeline.Artifact('CloudAsm'); +const integTestsArtifact = new codepipeline.Artifact('IntegTests'); + +const pipeline = new CdkPipeline(this, 'Pipeline', { + synthAction: SimpleSynthAction.standardNpmSynth({ + sourceArtifact, + cloudAssemblyArtifact, + buildCommand: 'npm run build', + additionalArtifacts: [ + { + directory: 'test', + artifact: integTestsArtifact, + } + ], + }), + // ... +}); + +const validationAction = new ShellScriptAction({ + name: 'TestUsingBuildArtifact', + additionalArtifacts: [integTestsArtifact], + // 'test.js' was produced from 'test/test.ts' during the synth step + commands: ['node ./test.js'], +}); +``` + +## CDK Bootstrapping + +An *environment* is an *(account, region)* pair where you want to deploy a CDK +stack (see [Environments](https://docs.aws.amazon.com/cdk/latest/guide/environments.html) +in the CDK Developer Guide). + +Before you can provision the pipeline, you have to *bootstrap* the environment you want +to create it in. If you are deploying your application to different environments, you +also have to bootstrap those and be sure to add a *trust* relationship. + +To bootstrap an environment for provisioning the pipeline: + +``` +$ env CDK_NEW_BOOTSTRAP=1 npx cdk bootstrap \ + [--profile admin-profile-1] \ + --cloudformation-execution-policies arn:aws:iam::aws:policy/AdministratorAccess \ + aws://111111111111/us-east-1 +``` + +To bootstrap a different environment for deploying CDK applications into using +a pipeline in account `111111111111`: + +``` +$ env CDK_NEW_BOOTSTRAP=1 npx cdk bootstrap \ + [--profile admin-profile-2] \ + --cloudformation-execution-policies arn:aws:iam::aws:policy/AdministratorAccess \ + --trust 11111111111 \ + aws://222222222222/us-east-2 +``` + +These command lines explained: + +* `npx`: means to use the CDK CLI from the current NPM install. If you are using + a global install of the CDK CLI, leave this out. +* `--profile`: should indicate a profile with administrator privileges that has + permissions to provision a pipeline in the indicated account. You can leave this + flag out if either the AWS default credentials or the `AWS_*` environment + variables confer these permissions. +* `--cloudformation-execution-policies`: ARN of the managed policy that future CDK + deployments should execute with. You can tailor this to the needs of your organization + and give more constrained permissions than `AdministratorAccess`. +* `--trust`: indicates which other account(s) should have permissions to deploy + CDK applications into this account. In this case we indicate the Pipeline's account, + but you could also use this for developer accounts (don't do that for production + application accounts though!). +* `aws://222222222222/us-east-2`: the account and region we're bootstrapping. 
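+
+As a sketch (reusing the `MyApplication` stage class from the earlier examples), an
+application deployed into the environment bootstrapped above would then be added to
+the pipeline like this:
+
+```ts
+pipeline.addApplicationStage(new MyApplication(this, 'Prod', {
+  env: { account: '222222222222', region: 'us-east-2' },
+}));
+```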
+ +> **Security tip**: we recommend that you use administrative credentials to an +> account only to bootstrap it and provision the initial pipeline. Otherwise, +> access to administrative credentials should be dropped as soon as possible. + +### Migrating from old bootstrap stack + +The bootstrap stack is a CloudFormation stack in your account named +**CDKToolkit** that provisions a set of resources required for the CDK +to deploy into that environment. + +The "new" bootstrap stack (obtained by running `cdk bootstrap` with +`CDK_NEW_BOOTSTRAP=1`) is slightly more elaborate than the "old" stack. It +contains: + +* An S3 bucket and ECR repository with predictable names, so that we can reference + assets in these storage locations *without* the use of CloudFormation template + parameters. +* A set of roles with permissions to access these asset locations and to execute + CloudFormation, assumeable from whatever accounts you specify under `--trust`. + +It is possible and safe to migrate from the old bootstrap stack to the new +bootstrap stack. This will create a new S3 file asset bucket in your account +and orphan the old bucket. You should manually delete the orphaned bucket +after you are sure you have redeployed all CDK applications and there are no +more references to the old asset bucket. + +## Security Tips + +It's important to stay safe while employing Continuous Delivery. The CDK Pipelines +library comes with secure defaults to the best of our ability, but by its +very nature the library cannot take care of everything. + +We therefore expect you to mind the following: + +* Maintain dependency hygiene and vet 3rd-party software you use. Any software you + run on your build machine has the ability to change the infrastructure that gets + deployed. Be careful with the software you depend on. + +* Use dependency locking to prevent accidental upgrades! The default `CdkSynths` that + come with CDK Pipelines will expect `package-lock.json` and `yarn.lock` to + ensure your dependencies are the ones you expect. + +* Credentials to production environments should be short-lived. After + bootstrapping and the initial pipeline provisioning, there is no more need for + developers to have access to any of the account credentials; all further + changes can be deployed through git. Avoid the chances of credentials leaking + by not having them in the first place! + +## Troubleshooting + +Here are some common errors you may encounter while using this library. + +### Pipeline: Internal Failure + +If you see the following error: + +``` +CREATE_FAILED | AWS::CodePipeline::Pipeline | Pipeline/Pipeline +Internal Failure +``` + +There's something wrong with your GitHub access token. It might be missing, or not have the +right permissions to access the repository you're trying to access. + +### Key: Policy contains a statement with one or more invalid principals + +If you see the following error: + +``` +CREATE_FAILED | AWS::KMS::Key | Pipeline/Pipeline/ArtifactsBucketEncryptionKey +Policy contains a statement with one or more invalid principals. +``` + +One of the target (account, region) environments has not been bootstrapped +with the new bootstrap stack. Check your target environments and make sure +they are all bootstrapped. + +## Current Limitations + +Limitations that we are aware of and will address: + +* **No context queries**: context queries are not supported. That means that + Vpc.fromLookup() and other functions like it will not work. 
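+
+To make the context-query limitation above concrete: a lookup such as the following
+(a sketch; the stack class name and VPC id are hypothetical, and `@aws-cdk/aws-ec2`
+is assumed to be a dependency of your app) will not resolve when the app is
+synthesized inside the pipeline:
+
+```ts
+import * as ec2 from '@aws-cdk/aws-ec2';
+import { Construct, Stack, StackProps } from '@aws-cdk/core';
+
+class MyNetworkStack extends Stack {
+  constructor(scope: Construct, id: string, props?: StackProps) {
+    super(scope, id, props);
+
+    // A context lookup like this one is not yet supported when synthesis
+    // happens in the pipeline's Build stage ('vpc-1234567' is a placeholder):
+    ec2.Vpc.fromLookup(this, 'Vpc', { vpcId: 'vpc-1234567' });
+  }
+}
+```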
+ +## Known Issues + +There are some usability issues that are caused by underlying technology, and +cannot be remedied by CDK at this point. They are reproduced here for completeness. + +- **Console links to other accounts will not work**: the AWS CodePipeline + console will assume all links are relative to the current account. You will + not be able to use the pipeline console to click through to a CloudFormation + stack in a different account. +- **If a change set failed to apply the pipeline must restarted**: if a change + set failed to apply, it cannot be retried. The pipeline must be restarted from + the top by clicking **Release Change**. +- **A stack that failed to create must be deleted manually**: if a stack + failed to create on the first attempt, you must delete it using the + CloudFormation console before starting the pipeline again by clicking + **Release Change**. diff --git a/packages/@aws-cdk/pipelines/lib/actions/deploy-cdk-stack-action.ts b/packages/@aws-cdk/pipelines/lib/actions/deploy-cdk-stack-action.ts new file mode 100644 index 0000000000000..f4d880802348b --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/actions/deploy-cdk-stack-action.ts @@ -0,0 +1,360 @@ +import * as cfn from '@aws-cdk/aws-cloudformation'; +import * as codepipeline from '@aws-cdk/aws-codepipeline'; +import * as cpactions from '@aws-cdk/aws-codepipeline-actions'; +import * as events from '@aws-cdk/aws-events'; +import * as iam from '@aws-cdk/aws-iam'; +import { Arn, Construct, Fn, Stack } from '@aws-cdk/core'; +import * as cxapi from '@aws-cdk/cx-api'; +import * as path from 'path'; +import { appOf, assemblyBuilderOf } from '../private/construct-internals'; + +/** + * Customization options for a DeployCdkStackAction + */ +export interface DeployCdkStackActionOptions { + /** + * Base name of the action + * + * @default stackName + */ + readonly baseActionName?: string; + + /** + * The CodePipeline artifact that holds the Cloud Assembly. + */ + readonly cloudAssemblyInput: codepipeline.Artifact; + + /** + * Run order for the Prepare action + * + * @default 1 + */ + readonly prepareRunOrder?: number; + + /** + * Run order for the Execute action + * + * @default - prepareRunOrder + 1 + */ + readonly executeRunOrder?: number; + + /** + * Artifact to write Stack Outputs to + * + * @default - No outputs + */ + readonly output?: codepipeline.Artifact; + + /** + * Filename in output to write Stack outputs to + * + * @default - Required when 'output' is set + */ + readonly outputFileName?: string; + + /** + * Name of the change set to create and deploy + * + * @default 'PipelineChange' + */ + readonly changeSetName?: string; +} + +/** + * Properties for a DeployCdkStackAction + */ +export interface DeployCdkStackActionProps extends DeployCdkStackActionOptions { + /** + * Relative path of template in the input artifact + */ + readonly templatePath: string; + + /** + * Role for the action to assume + * + * This controls the account to deploy into + */ + readonly actionRole: iam.IRole; + + /** + * The name of the stack that should be created/updated + */ + readonly stackName: string; + + /** + * Role to execute CloudFormation under + * + * @default - Execute CloudFormation using the action role + */ + readonly cloudFormationExecutionRole?: iam.IRole; + + /** + * Region to deploy into + * + * @default - Same region as pipeline + */ + readonly region?: string; + + /** + * Artifact ID for the stack deployed here + * + * Used for pipeline order checking. 
+ * + * @default - Order will not be checked + */ + readonly stackArtifactId?: string; + + /** + * Artifact ID for the stacks this stack depends on + * + * Used for pipeline order checking. + * + * @default - No dependencies + */ + readonly dependencyStackArtifactIds?: string[]; +} + +/** + * Options for the 'fromStackArtifact' operation + */ +export interface CdkStackActionFromArtifactOptions extends DeployCdkStackActionOptions { + /** + * The name of the stack that should be created/updated + * + * @default - Same as stack artifact + */ + readonly stackName?: string; +} + +/** + * Action to deploy a CDK Stack + * + * Adds two CodePipeline Actions to the pipeline: one to create a ChangeSet + * and one to execute it. + * + * You do not need to instantiate this action yourself -- it will automatically + * be added by the pipeline when you add stack artifacts or entire stages. + */ +export class DeployCdkStackAction implements codepipeline.IAction { + /** + * Construct a DeployCdkStackAction from a Stack artifact + */ + public static fromStackArtifact(scope: Construct, artifact: cxapi.CloudFormationStackArtifact, options: CdkStackActionFromArtifactOptions) { + if (!artifact.assumeRoleArn) { + // tslint:disable-next-line:max-line-length + throw new Error(`Stack '${artifact.stackName}' does not have deployment role information; use the 'DefaultStackSynthesizer' synthesizer, or set the '@aws-cdk/core:newStyleStackSynthesis' context key.`); + } + + const actionRole = roleFromPlaceholderArn(scope, artifact.assumeRoleArn); + const cloudFormationExecutionRole = roleFromPlaceholderArn(scope, artifact.cloudFormationExecutionRoleArn); + + const artRegion = artifact.environment.region; + const region = artRegion === Stack.of(scope).region || artRegion === cxapi.UNKNOWN_REGION ? undefined : artRegion; + + // We need the path of the template relative to the root Cloud Assembly + // It should be easier to get this, but for now it is what it is. + const appAsmRoot = assemblyBuilderOf(appOf(scope)).outdir; + const fullTemplatePath = path.join(artifact.assembly.directory, artifact.templateFile); + const templatePath = path.relative(appAsmRoot, fullTemplatePath); + + return new DeployCdkStackAction({ + actionRole, + cloudFormationExecutionRole, + templatePath, + region, + stackArtifactId: artifact.id, + dependencyStackArtifactIds: artifact.dependencies.filter(isStackArtifact).map(s => s.id), + stackName: options.stackName ?? artifact.stackName, + ...options, + }); + } + + /** + * The runorder for the prepare action + */ + public readonly prepareRunOrder: number; + + /** + * The runorder for the execute action + */ + public readonly executeRunOrder: number; + + /** + * Name of the deployed stack + */ + public readonly stackName: string; + + /** + * Artifact id of the artifact this action was based on + */ + public readonly stackArtifactId?: string; + + /** + * Artifact ids of the artifact this stack artifact depends on + */ + public readonly dependencyStackArtifactIds: string[]; + + private readonly prepareChangeSetAction: cpactions.CloudFormationCreateReplaceChangeSetAction; + private readonly executeChangeSetAction: cpactions.CloudFormationExecuteChangeSetAction; + + constructor(props: DeployCdkStackActionProps) { + if (props.output && !props.outputFileName) { + throw new Error('If \'output\' is set, \'outputFileName\' is also required'); + } + + this.stackArtifactId = props.stackArtifactId; + this.dependencyStackArtifactIds = props.dependencyStackArtifactIds ?? 
[]; + + this.prepareRunOrder = props.prepareRunOrder ?? 1; + this.executeRunOrder = props.executeRunOrder ?? this.prepareRunOrder + 1; + this.stackName = props.stackName; + const baseActionName = props.baseActionName ?? this.stackName; + const changeSetName = props.changeSetName ?? 'PipelineChange'; + + this.prepareChangeSetAction = new cpactions.CloudFormationCreateReplaceChangeSetAction({ + actionName: `${baseActionName}.Prepare`, + changeSetName, + runOrder: this.prepareRunOrder, + stackName: this.stackName, + templatePath: props.cloudAssemblyInput.atPath(props.templatePath), + adminPermissions: false, + role: props.actionRole, + deploymentRole: props.cloudFormationExecutionRole, + region: props.region, + capabilities: [cfn.CloudFormationCapabilities.NAMED_IAM, cfn.CloudFormationCapabilities.AUTO_EXPAND], + }); + this.executeChangeSetAction = new cpactions.CloudFormationExecuteChangeSetAction({ + actionName: `${baseActionName}.Deploy`, + changeSetName, + runOrder: this.executeRunOrder, + stackName: this.stackName, + role: props.actionRole, + region: props.region, + outputFileName: props.outputFileName, + output: props.output, + }); + } + + /** + * Exists to implement IAction + */ + public bind(scope: Construct, stage: codepipeline.IStage, options: codepipeline.ActionBindOptions): + codepipeline.ActionConfig { + stage.addAction(this.prepareChangeSetAction); + + return this.executeChangeSetAction.bind(scope, stage, options); + } + + /** + * Exists to implement IAction + */ + public onStateChange(name: string, target?: events.IRuleTarget, options?: events.RuleProps): events.Rule { + return this.executeChangeSetAction.onStateChange(name, target, options); + } + + /** + * Exists to implement IAction + */ + public get actionProperties(): codepipeline.ActionProperties { + return this.executeChangeSetAction.actionProperties; + } +} + +function roleFromPlaceholderArn(scope: Construct, arn: string): iam.IRole; +function roleFromPlaceholderArn(scope: Construct, arn: string | undefined): iam.IRole | undefined; +function roleFromPlaceholderArn(scope: Construct, arn: string | undefined): iam.IRole | undefined { + if (!arn) { return undefined; } + + // Use placeholdered arn as construct ID. + const id = arn; + + scope = hackyRoleScope(scope, arn); + + // https://github.com/aws/aws-cdk/issues/7255 + let existingRole = scope.node.tryFindChild(`ImmutableRole${id}`) as iam.IRole; + if (existingRole) { return existingRole; } + // For when #7255 is fixed. + existingRole = scope.node.tryFindChild(id) as iam.IRole; + if (existingRole) { return existingRole; } + + return iam.Role.fromRoleArn(scope, id, cfnExpressionFromManifestString(arn), { mutable: false }); +} + +/** + * MASSIVE HACK + * + * We have a bug in the CDK where it's only going to consider Roles that are physically in a + * different Stack object from the Pipeline "cross-account", and will add the appropriate + * Bucket/Key policies. + * https://github.com/aws/aws-cdk/pull/8280 will resolve this, but for now we fake it by hacking + * up a Stack object to root the role in! + * + * Fortunatey, we can just 'new up' an unrooted Stack (unit tests do this all the time) and toss it + * away. It will never be synthesized, but all the logic happens to work out! 
+ */ +function hackyRoleScope(scope: Construct, arn: string): Construct { + const parts = Arn.parse(cxapi.EnvironmentPlaceholders.replace(arn, { + accountId: '', // Empty string on purpose, see below + partition: '', + region: '', + })); + return new Stack(undefined, undefined, { + env: { + // Empty string means ARN had a placeholder which means same account as pipeline stack + account: parts.account || Stack.of(scope).account, + // 'region' from an IAM ARN is always an empty string, so no point. + }, + }); +} + +/** + * Return a CloudFormation expression from a manifest string with placeholders + */ +function cfnExpressionFromManifestString(s: string) { + // This implementation relies on the fact that the manifest placeholders are + // '${AWS::Partition}' etc., and so are the same values as those that are + // trivially substituable using a `Fn.sub`. + return Fn.sub(s); +} + +/** + * Options for CdkDeployAction.fromStackArtifact + */ +export interface FromStackArtifactOptions { + /** + * The CodePipeline artifact that holds the Cloud Assembly. + */ + readonly cloudAssemblyInput: codepipeline.Artifact; + + /** + * Run order for the 2 actions that will be created + * + * @default 1 + */ + readonly prepareRunOrder?: number; + + /** + * Run order for the Execute action + * + * @default - prepareRunOrder + 1 + */ + readonly executeRunOrder?: number; + + /** + * Artifact to write Stack Outputs to + * + * @default - No outputs + */ + readonly output?: codepipeline.Artifact; + + /** + * Filename in output to write Stack outputs to + * + * @default - Required when 'output' is set + */ + readonly outputFileName?: string; +} + +function isStackArtifact(a: cxapi.CloudArtifact): a is cxapi.CloudFormationStackArtifact { + return a instanceof cxapi.CloudFormationStackArtifact; +} \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/lib/actions/index.ts b/packages/@aws-cdk/pipelines/lib/actions/index.ts new file mode 100644 index 0000000000000..834ded93472f2 --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/actions/index.ts @@ -0,0 +1,3 @@ +export * from './deploy-cdk-stack-action'; +export * from './publish-assets-action'; +export * from './update-pipeline-action'; \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/lib/actions/publish-assets-action.ts b/packages/@aws-cdk/pipelines/lib/actions/publish-assets-action.ts new file mode 100644 index 0000000000000..668d8f831b548 --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/actions/publish-assets-action.ts @@ -0,0 +1,153 @@ +import * as codebuild from '@aws-cdk/aws-codebuild'; +import * as codepipeline from '@aws-cdk/aws-codepipeline'; +import * as codepipeline_actions from '@aws-cdk/aws-codepipeline-actions'; +import * as events from '@aws-cdk/aws-events'; +import * as iam from '@aws-cdk/aws-iam'; +import { Construct, Lazy } from '@aws-cdk/core'; + +/** + * Type of the asset that is being published + */ +export enum AssetType { + /** + * A file + */ + FILE = 'file', + + /** + * A Docker image + */ + DOCKER_IMAGE = 'docker-image', +} + +/** + * Props for a PublishAssetsAction + */ +export interface PublishAssetsActionProps { + /** + * Name of publishing action + */ + readonly actionName: string; + + /** + * The CodePipeline artifact that holds the Cloud Assembly. + */ + readonly cloudAssemblyInput: codepipeline.Artifact; + + /** + * AssetType we're publishing + */ + readonly assetType: AssetType; + + /** + * Version of CDK CLI to 'npm install'. 
+ * + * @default - Latest version + */ + readonly cdkCliVersion?: string; + + /** + * Name of the CodeBuild project + * + * @default - Automatically generated + */ + readonly projectName?: string; +} + +/** + * Action to publish an asset in the pipeline + * + * Creates a CodeBuild project which will use the CDK CLI + * to prepare and publish the asset. + * + * You do not need to instantiate this action -- it will automatically + * be added by the pipeline when you add stacks that use assets. + */ +export class PublishAssetsAction extends Construct implements codepipeline.IAction { + private readonly action: codepipeline.IAction; + private readonly commands = new Array(); + + constructor(scope: Construct, id: string, private readonly props: PublishAssetsActionProps) { + super(scope, id); + + const installSuffix = props.cdkCliVersion ? `@${props.cdkCliVersion}` : ''; + + const project = new codebuild.PipelineProject(this, 'Default', { + projectName: this.props.projectName, + buildSpec: codebuild.BuildSpec.fromObject({ + version: '0.2', + phases: { + install: { + commands: `npm install -g cdk-assets${installSuffix}`, + }, + build: { + commands: Lazy.listValue({ produce: () => this.commands }), + }, + }, + }), + // Needed to perform Docker builds + environment: props.assetType === AssetType.DOCKER_IMAGE ? { privileged: true } : undefined, + }); + + const rolePattern = props.assetType === AssetType.DOCKER_IMAGE + ? 'arn:*:iam::*:role/*-image-publishing-role-*' + : 'arn:*:iam::*:role/*-file-publishing-role-*'; + + project.addToRolePolicy(new iam.PolicyStatement({ + actions: ['sts:AssumeRole'], + resources: [rolePattern], + })); + + this.action = new codepipeline_actions.CodeBuildAction({ + actionName: props.actionName, + project, + input: this.props.cloudAssemblyInput, + }); + } + + /** + * Add a single publishing command + * + * Manifest path should be relative to the root Cloud Assembly. + */ + public addPublishCommand(relativeManifestPath: string, assetSelector: string) { + const command = `cdk-assets --path "${relativeManifestPath}" --verbose publish "${assetSelector}"`; + if (!this.commands.includes(command)) { + this.commands.push(command); + } + } + + /** + * Exists to implement IAction + */ + public bind(scope: Construct, stage: codepipeline.IStage, options: codepipeline.ActionBindOptions): + codepipeline.ActionConfig { + return this.action.bind(scope, stage, options); + } + + /** + * Exists to implement IAction + */ + public onStateChange(name: string, target?: events.IRuleTarget, options?: events.RuleProps): events.Rule { + return this.action.onStateChange(name, target, options); + } + + /** + * Exists to implement IAction + */ + public get actionProperties(): codepipeline.ActionProperties { + // FIXME: I have had to make this class a Construct, because: + // + // - It needs access to the Construct tree, because it is going to add a `PipelineProject`. + // - I would have liked to have done that in bind(), however, + // - `actionProperties` (this method) is called BEFORE bind() is called, and by that point I + // don't have the "inner" Action yet to forward the call to. + // + // I've therefore had to construct the inner CodeBuildAction in the constructor, which requires making this + // Action a Construct. + // + // Combined with how non-intuitive it is to make the "StackDeployAction", I feel there is something + // wrong with the Action abstraction here. 
+ return this.action.actionProperties; + } +} diff --git a/packages/@aws-cdk/pipelines/lib/actions/update-pipeline-action.ts b/packages/@aws-cdk/pipelines/lib/actions/update-pipeline-action.ts new file mode 100644 index 0000000000000..e7b19ac860102 --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/actions/update-pipeline-action.ts @@ -0,0 +1,127 @@ +import * as codebuild from '@aws-cdk/aws-codebuild'; +import * as codepipeline from '@aws-cdk/aws-codepipeline'; +import * as cpactions from '@aws-cdk/aws-codepipeline-actions'; +import * as events from '@aws-cdk/aws-events'; +import * as iam from '@aws-cdk/aws-iam'; +import { Construct } from '@aws-cdk/core'; +import { embeddedAsmPath } from '../private/construct-internals'; + +/** + * Props for the UpdatePipelineAction + */ +export interface UpdatePipelineActionProps { + /** + * The CodePipeline artifact that holds the Cloud Assembly. + */ + readonly cloudAssemblyInput: codepipeline.Artifact; + + /** + * Name of the pipeline stack + */ + readonly pipelineStackName: string; + + /** + * Version of CDK CLI to 'npm install'. + * + * @default - Latest version + */ + readonly cdkCliVersion?: string; + + /** + * Name of the CodeBuild project + * + * @default - Automatically generated + */ + readonly projectName?: string; +} + +/** + * Action to self-mutate the pipeline + * + * Creates a CodeBuild project which will use the CDK CLI + * to deploy the pipeline stack. + * + * You do not need to instantiate this action -- it will automatically + * be added by the pipeline. + */ +export class UpdatePipelineAction extends Construct implements codepipeline.IAction { + private readonly action: codepipeline.IAction; + + constructor(scope: Construct, id: string, props: UpdatePipelineActionProps) { + super(scope, id); + + const installSuffix = props.cdkCliVersion ? `@${props.cdkCliVersion}` : ''; + + const selfMutationProject = new codebuild.PipelineProject(this, 'SelfMutation', { + projectName: props.projectName, + buildSpec: codebuild.BuildSpec.fromObject({ + version: '0.2', + phases: { + install: { + commands: `npm install -g aws-cdk${installSuffix}`, + }, + build: { + commands: [ + // Cloud Assembly is in *current* directory. 
+ `cdk -a ${embeddedAsmPath(scope)} deploy ${props.pipelineStackName} --require-approval=never --verbose`, + ], + }, + }, + }), + }); + + // allow the self-mutating project permissions to assume the bootstrap Action role + selfMutationProject.addToRolePolicy(new iam.PolicyStatement({ + actions: ['sts:AssumeRole'], + resources: ['arn:*:iam::*:role/*-deploy-role-*', 'arn:*:iam::*:role/*-publishing-role-*'], + })); + selfMutationProject.addToRolePolicy(new iam.PolicyStatement({ + actions: ['cloudformation:DescribeStacks'], + resources: ['*'], // this is needed to check the status of the bootstrap stack when doing `cdk deploy` + })); + // S3 checks for the presence of the ListBucket permission + selfMutationProject.addToRolePolicy(new iam.PolicyStatement({ + actions: ['s3:ListBucket'], + resources: ['*'], + })); + this.action = new cpactions.CodeBuildAction({ + actionName: 'SelfMutate', + input: props.cloudAssemblyInput, + project: selfMutationProject, + }); + } + + /** + * Exists to implement IAction + */ + public bind(scope: Construct, stage: codepipeline.IStage, options: codepipeline.ActionBindOptions): + codepipeline.ActionConfig { + return this.action.bind(scope, stage, options); + } + + /** + * Exists to implement IAction + */ + public onStateChange(name: string, target?: events.IRuleTarget, options?: events.RuleProps): events.Rule { + return this.action.onStateChange(name, target, options); + } + + /** + * Exists to implement IAction + */ + public get actionProperties(): codepipeline.ActionProperties { + // FIXME: I have had to make this class a Construct, because: + // + // - It needs access to the Construct tree, because it is going to add a `PipelineProject`. + // - I would have liked to have done that in bind(), however, + // - `actionProperties` (this method) is called BEFORE bind() is called, and by that point I + // don't have the "inner" Action yet to forward the call to. + // + // I've therefore had to construct the inner CodeBuildAction in the constructor, which requires making this + // Action a Construct. + // + // Combined with how non-intuitive it is to make the "StackDeployAction", I feel there is something + // wrong with the Action abstraction here. 
+ return this.action.actionProperties; + } +} diff --git a/packages/@aws-cdk/pipelines/lib/index.ts b/packages/@aws-cdk/pipelines/lib/index.ts new file mode 100644 index 0000000000000..dbe8a73291c23 --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/index.ts @@ -0,0 +1,5 @@ +export * from './pipeline'; +export * from './stage'; +export * from './synths'; +export * from './actions'; +export * from './validation'; \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/lib/pipeline.ts b/packages/@aws-cdk/pipelines/lib/pipeline.ts new file mode 100644 index 0000000000000..75fb655560c6b --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/pipeline.ts @@ -0,0 +1,313 @@ +import * as codepipeline from '@aws-cdk/aws-codepipeline'; +import { App, CfnOutput, Construct, Stack, Stage } from '@aws-cdk/core'; +import * as path from 'path'; +import { AssetType, DeployCdkStackAction, PublishAssetsAction, UpdatePipelineAction } from './actions'; +import { appOf, assemblyBuilderOf } from './private/construct-internals'; +import { AddStageOptions, AssetPublishingCommand, CdkStage, StackOutput } from './stage'; + +/** + * Properties for a CdkPipeline + */ +export interface CdkPipelineProps { + /** + * The CodePipeline action used to retrieve the CDK app's source + */ + readonly sourceAction: codepipeline.IAction; + + /** + * The CodePipeline action build and synthesis step of the CDK app + */ + readonly synthAction: codepipeline.IAction; + + /** + * The artifact you have defined to be the artifact to hold the cloudAssemblyArtifact for the synth action + */ + readonly cloudAssemblyArtifact: codepipeline.Artifact; + + /** + * Name of the pipeline + * + * @default - A name is automatically generated + */ + readonly pipelineName?: string; + + /** + * CDK CLI version to use in pipeline + * + * Some Actions in the pipeline will download and run a version of the CDK + * CLI. Specify the version here. + * + * @default - Latest version + */ + readonly cdkCliVersion?: string; +} + +/** + * A Pipeline to deploy CDK apps + * + * Defines an AWS CodePipeline-based Pipeline to deploy CDK applications. + * + * Automatically manages the following: + * + * - Stack dependency order. + * - Asset publishing. + * - Keeping the pipeline up-to-date as the CDK apps change. + * - Using stack outputs later on in the pipeline. 
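+ *
+ * A minimal usage sketch (illustrative only: it assumes the usual imports for
+ * `codepipeline`, `codepipeline_actions` and `SecretValue`, a GitHub source, and an
+ * NPM-based CDK app; the owner, repo and secret names are placeholders):
+ *
+ *    const sourceArtifact = new codepipeline.Artifact();
+ *    const cloudAssemblyArtifact = new codepipeline.Artifact();
+ *
+ *    const pipeline = new CdkPipeline(this, 'Pipeline', {
+ *      cloudAssemblyArtifact,
+ *      sourceAction: new codepipeline_actions.GitHubSourceAction({
+ *        actionName: 'GitHub',
+ *        output: sourceArtifact,
+ *        oauthToken: SecretValue.secretsManager('github-token'),
+ *        owner: 'OWNER',
+ *        repo: 'REPO',
+ *      }),
+ *      synthAction: SimpleSynthAction.standardNpmSynth({
+ *        sourceArtifact,
+ *        cloudAssemblyArtifact,
+ *      }),
+ *    });
+ *
+ *    // 'MyApplication' is a hypothetical Stage subclass containing the stacks to deploy
+ *    pipeline.addApplicationStage(new MyApplication(this, 'Prod'));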
+ */ +export class CdkPipeline extends Construct { + private readonly _pipeline: codepipeline.Pipeline; + private readonly _assets: AssetPublishing; + private readonly _stages: CdkStage[] = []; + private readonly _outputArtifacts: Record = {}; + private readonly _cloudAssemblyArtifact: codepipeline.Artifact; + + constructor(scope: Construct, id: string, props: CdkPipelineProps) { + super(scope, id); + + if (!App.isApp(this.node.root)) { + throw new Error('CdkPipeline must be created under an App'); + } + + this._cloudAssemblyArtifact = props.cloudAssemblyArtifact; + const pipelineStack = Stack.of(this); + + this._pipeline = new codepipeline.Pipeline(this, 'Pipeline', { + ...props, + restartExecutionOnUpdate: true, + stages: [ + { + stageName: 'Source', + actions: [props.sourceAction], + }, + { + stageName: 'Build', + actions: [props.synthAction], + }, + { + stageName: 'UpdatePipeline', + actions: [new UpdatePipelineAction(this, 'UpdatePipeline', { + cloudAssemblyInput: this._cloudAssemblyArtifact, + pipelineStackName: pipelineStack.stackName, + cdkCliVersion: props.cdkCliVersion, + projectName: maybeSuffix(props.pipelineName, '-selfupdate'), + })], + }, + ], + }); + + this._assets = new AssetPublishing(this, 'Assets', { + cloudAssemblyInput: this._cloudAssemblyArtifact, + cdkCliVersion: props.cdkCliVersion, + pipeline: this._pipeline, + projectName: maybeSuffix(props.pipelineName, '-publish'), + }); + } + + /** + * Add pipeline stage that will deploy the given application stage + * + * The application construct should subclass `Stage` and can contain any + * number of `Stacks` inside it that may have dependency relationships + * on one another. + * + * All stacks in the application will be deployed in the appropriate order, + * and all assets found in the application will be added to the asset + * publishing stage. + */ + public addApplicationStage(appStage: Stage, options: AddStageOptions = {}): CdkStage { + const stage = this.addStage(appStage.stageName); + stage.addApplication(appStage, options); + return stage; + } + + /** + * Add a new, empty stage to the pipeline + * + * Prefer to use `addApplicationStage` if you are intended to deploy a CDK + * application, but you can use this method if you want to add other kinds of + * Actions to a pipeline. + */ + public addStage(stageName: string) { + const pipelineStage = this._pipeline.addStage({ + stageName, + }); + + const stage = new CdkStage(this, stageName, { + cloudAssemblyArtifact: this._cloudAssemblyArtifact, + pipelineStage, + stageName, + host: { + publishAsset: this._assets.addPublishAssetAction.bind(this._assets), + stackOutputArtifact: (artifactId) => this._outputArtifacts[artifactId], + }, + }); + this._stages.push(stage); + return stage; + } + + /** + * Get the StackOutput object that holds this CfnOutput's value in this pipeline + * + * `StackOutput` can be used in validation actions later in the pipeline. + */ + public stackOutput(cfnOutput: CfnOutput): StackOutput { + const stack = Stack.of(cfnOutput); + + if (!this._outputArtifacts[stack.artifactId]) { + // We should have stored the ArtifactPath in the map, but its Artifact + // property isn't publicly readable... 
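+ // Lazily allocate one output artifact per stack: the deploy action for that stack will
+ // write all of its CloudFormation outputs to 'outputs.json' inside this artifact, and
+ // each StackOutput points at one key in that file.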
+ this._outputArtifacts[stack.artifactId] = new codepipeline.Artifact(`Artifact_${stack.artifactId}_Outputs`); + } + + return new StackOutput(this._outputArtifacts[stack.artifactId].atPath('outputs.json'), cfnOutput.logicalId); + } + + /** + * Validate that we don't have any stacks violating dependency order in the pipeline + * + * Our own convenience methods will never generate a pipeline that does that (although + * this is a nice verification), but a user can also add the stacks by hand. + */ + protected validate(): string[] { + const ret = new Array(); + + ret.push(...this.validateDeployOrder()); + ret.push(...this.validateRequestedOutputs()); + + return ret; + } + + protected onPrepare() { + super.onPrepare(); + + // TODO: Support this in a proper way in the upstream library. For now, we + // "un-add" the Assets stage if it turns out to be empty. + this._assets.removeAssetsStageIfEmpty(); + } + + /** + * Return all StackDeployActions in an ordered list + */ + private get stackActions(): DeployCdkStackAction[] { + return flatMap(this._pipeline.stages, s => s.actions.filter(isDeployAction)); + } + + private* validateDeployOrder(): IterableIterator { + const stackActions = this.stackActions; + for (const stackAction of stackActions) { + // For every dependency, it must be executed in an action before this one is prepared. + for (const depId of stackAction.dependencyStackArtifactIds) { + const depAction = stackActions.find(s => s.stackArtifactId === depId); + + if (depAction === undefined) { + this.node.addWarning(`Stack '${stackAction.stackName}' depends on stack ` + + `'${depId}', but that dependency is not deployed through the pipeline!`); + } else if (!(depAction.executeRunOrder < stackAction.prepareRunOrder)) { + yield `Stack '${stackAction.stackName}' depends on stack ` + + `'${depAction.stackName}', but is deployed before it in the pipeline!`; + } + } + } + } + + private* validateRequestedOutputs(): IterableIterator { + const artifactIds = this.stackActions.map(s => s.stackArtifactId); + + for (const artifactId of Object.keys(this._outputArtifacts)) { + if (!artifactIds.includes(artifactId)) { + yield `Trying to use outputs for Stack '${artifactId}', but Stack is not deployed in this pipeline. Add it to the pipeline.`; + } + } + } +} + +function isDeployAction(a: codepipeline.IAction): a is DeployCdkStackAction { + return a instanceof DeployCdkStackAction; +} + +function flatMap(xs: A[], f: (x: A) => B[]): B[] { + return Array.prototype.concat([], ...xs.map(f)); +} + +interface AssetPublishingProps { + readonly cloudAssemblyInput: codepipeline.Artifact; + readonly pipeline: codepipeline.Pipeline; + readonly cdkCliVersion?: string; + readonly projectName?: string; +} + +/** + * Add appropriate publishing actions to the asset publishing stage + */ +class AssetPublishing extends Construct { + private readonly publishers: Record = {}; + private readonly myCxAsmRoot: string; + + private readonly stage: codepipeline.IStage; + private _fileAssetCtr = 1; + private _dockerAssetCtr = 1; + + constructor(scope: Construct, id: string, private readonly props: AssetPublishingProps) { + super(scope, id); + this.myCxAsmRoot = path.resolve(assemblyBuilderOf(appOf(this)).outdir); + + // We MUST add the Stage immediately here, otherwise it will be in the wrong place + // in the pipeline! 
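+ // (If no assets get registered later, the empty stage is removed again in
+ // removeAssetsStageIfEmpty(); adding it lazily instead would make it come after the
+ // application stages that are added afterwards.)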
+ this.stage = this.props.pipeline.addStage({ stageName: 'Assets' }); + } + + /** + * Make sure there is an action in the stage to publish the given asset + * + * Assets are grouped by asset ID (which represent individual assets) so all assets + * are published in parallel. For each assets, all destinations are published sequentially + * so that we can reuse expensive operations between them (mostly: building a Docker image). + */ + public addPublishAssetAction(command: AssetPublishingCommand) { + // FIXME: this is silly, we need the relative path here but no easy way to get it + const relativePath = path.relative(this.myCxAsmRoot, command.assetManifestPath); + + let action = this.publishers[command.assetId]; + if (!action) { + // The asset ID would be a logical candidate for the construct path and project names, but if the asset + // changes it leads to recreation of a number of Role/Policy/Project resources which is slower than + // necessary. Number sequentially instead. + // + // FIXME: The ultimate best solution is probably to generate a single Project per asset type + // and reuse that for all assets. + + const id = command.assetType === AssetType.FILE ? `FileAsset${this._fileAssetCtr++}` : `DockerAsset${this._dockerAssetCtr++}`; + + action = this.publishers[command.assetId] = new PublishAssetsAction(this, id, { + actionName: command.assetId, + cloudAssemblyInput: this.props.cloudAssemblyInput, + cdkCliVersion: this.props.cdkCliVersion, + assetType: command.assetType, + }); + this.stage.addAction(action); + } + + action.addPublishCommand(relativePath, command.assetSelector); + } + + /** + * Remove the Assets stage if it turns out we didn't add any Assets to publish + */ + public removeAssetsStageIfEmpty() { + if (Object.keys(this.publishers).length === 0) { + // Hacks to get access to innards of Pipeline + // Modify 'stages' array in-place to remove Assets stage if empty + const stages: codepipeline.IStage[] = (this.props.pipeline as any)._stages; + + const ix = stages.indexOf(this.stage); + if (ix > -1) { + stages.splice(ix, 1); + } + } + } +} + +function maybeSuffix(x: string | undefined, suffix: string): string | undefined { + if (x === undefined) { return undefined; } + return `${x}${suffix}`; +} \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/lib/private/asset-manifest.ts b/packages/@aws-cdk/pipelines/lib/private/asset-manifest.ts new file mode 100644 index 0000000000000..752c7c242bc48 --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/private/asset-manifest.ts @@ -0,0 +1,296 @@ +// FIXME: copied from `ckd-assets`, because this tool needs to read the asset manifest aswell. +import { AssetManifest, DockerImageDestination, DockerImageSource, FileDestination, FileSource, Manifest } from '@aws-cdk/cloud-assembly-schema'; +import * as fs from 'fs'; +import * as path from 'path'; + +/** + * A manifest of assets + */ +export class AssetManifestReader { + /** + * The default name of the asset manifest in a cdk.out directory + */ + public static readonly DEFAULT_FILENAME = 'assets.json'; + + /** + * Load an asset manifest from the given file + */ + public static fromFile(fileName: string) { + try { + const obj = Manifest.loadAssetManifest(fileName); + + return new AssetManifestReader(path.dirname(fileName), obj); + } catch (e) { + throw new Error(`Canot read asset manifest '${fileName}': ${e.message}`); + } + } + + /** + * Load an asset manifest from the given file or directory + * + * If the argument given is a directoy, the default asset file name will be used. 
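+ *
+ * For example (paths are illustrative):
+ *
+ *    AssetManifestReader.fromPath('cdk.out');              // reads 'cdk.out/assets.json'
+ *    AssetManifestReader.fromPath('cdk.out/assets.json');  // reads the given file directly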
+ */ + public static fromPath(filePath: string) { + let st; + try { + st = fs.statSync(filePath); + } catch (e) { + throw new Error(`Cannot read asset manifest at '${filePath}': ${e.message}`); + } + if (st.isDirectory()) { + return AssetManifestReader.fromFile(path.join(filePath, AssetManifestReader.DEFAULT_FILENAME)); + } + return AssetManifestReader.fromFile(filePath); + } + + /** + * The directory where the manifest was found + */ + public readonly directory: string; + + constructor(directory: string, private readonly manifest: AssetManifest) { + this.directory = directory; + } + + /** + * Select a subset of assets and destinations from this manifest. + * + * Only assets with at least 1 selected destination are retained. + * + * If selection is not given, everything is returned. + */ + public select(selection?: DestinationPattern[]): AssetManifestReader { + if (selection === undefined) { return this; } + + const ret: AssetManifest & Required> + = { version: this.manifest.version, dockerImages: {}, files: {} }; + + for (const assetType of ASSET_TYPES) { + for (const [assetId, asset] of Object.entries(this.manifest[assetType] || {})) { + const filteredDestinations = filterDict( + asset.destinations, + (_, destId) => selection.some(sel => sel.matches(new DestinationIdentifier(assetId, destId)))); + + if (Object.keys(filteredDestinations).length > 0) { + ret[assetType][assetId] = { + ...asset, + destinations: filteredDestinations, + }; + } + } + } + + return new AssetManifestReader(this.directory, ret); + } + + /** + * Describe the asset manifest as a list of strings + */ + public list() { + return [ + ...describeAssets('file', this.manifest.files || {}), + ...describeAssets('docker-image', this.manifest.dockerImages || {}), + ]; + + function describeAssets(type: string, assets: Record }>) { + const ret = new Array(); + for (const [assetId, asset] of Object.entries(assets || {})) { + ret.push(`${assetId} ${type} ${JSON.stringify(asset.source)}`); + + const destStrings = Object.entries(asset.destinations).map(([destId, dest]) => ` ${assetId}:${destId} ${JSON.stringify(dest)}`); + ret.push(...prefixTreeChars(destStrings, ' ')); + } + return ret; + } + } + + /** + * List of assets, splat out to destinations + */ + public get entries(): IManifestEntry[] { + return [ + ...makeEntries(this.manifest.files || {}, FileManifestEntry), + ...makeEntries(this.manifest.dockerImages || {}, DockerImageManifestEntry), + ]; + + function makeEntries( + assets: Record }>, + ctor: new (id: DestinationIdentifier, source: A, destination: B) => C): C[] { + + const ret = new Array(); + for (const [assetId, asset] of Object.entries(assets)) { + for (const [destId, destination] of Object.entries(asset.destinations)) { + ret.push(new ctor(new DestinationIdentifier(assetId, destId), asset.source, destination)); + } + } + return ret; + } + } +} + +type AssetType = 'files' | 'dockerImages'; + +const ASSET_TYPES: AssetType[] = ['files', 'dockerImages']; + +/** + * A single asset from an asset manifest' + */ +export interface IManifestEntry { + /** + * The identifier of the asset + */ + readonly id: DestinationIdentifier; + + /** + * The type of asset + */ + readonly type: string; + + /** + * Type-dependent source data + */ + readonly genericSource: unknown; + + /** + * Type-dependent destination data + */ + readonly genericDestination: unknown; +} + +/** + * A manifest entry for a file asset + */ +export class FileManifestEntry implements IManifestEntry { + public readonly genericSource: unknown; + public readonly 
genericDestination: unknown; + public readonly type = 'file'; + + constructor( + /** Identifier for this asset */ + public readonly id: DestinationIdentifier, + /** Source of the file asset */ + public readonly source: FileSource, + /** Destination for the file asset */ + public readonly destination: FileDestination, + ) { + this.genericSource = source; + this.genericDestination = destination; + } +} + +/** + * A manifest entry for a docker image asset + */ +export class DockerImageManifestEntry implements IManifestEntry { + public readonly genericSource: unknown; + public readonly genericDestination: unknown; + public readonly type = 'docker-image'; + + constructor( + /** Identifier for this asset */ + public readonly id: DestinationIdentifier, + /** Source of the file asset */ + public readonly source: DockerImageSource, + /** Destination for the file asset */ + public readonly destination: DockerImageDestination, + ) { + this.genericSource = source; + this.genericDestination = destination; + } +} + +/** + * Identify an asset destination in an asset manifest + */ +export class DestinationIdentifier { + /** + * Identifies the asset, by source. + */ + public readonly assetId: string; + + /** + * Identifies the destination where this asset will be published + */ + public readonly destinationId: string; + + constructor(assetId: string, destinationId: string) { + this.assetId = assetId; + this.destinationId = destinationId; + } + + /** + * Return a string representation for this asset identifier + */ + public toString() { + return this.destinationId ? `${this.assetId}:${this.destinationId}` : this.assetId; + } +} + +function filterDict(xs: Record, pred: (x: A, key: string) => boolean): Record { + const ret: Record = {}; + for (const [key, value] of Object.entries(xs)) { + if (pred(value, key)) { + ret[key] = value; + } + } + return ret; +} + +/** + * A filter pattern for an destination identifier + */ +export class DestinationPattern { + /** + * Parse a ':'-separated string into an asset/destination identifier + */ + public static parse(s: string) { + if (!s) { throw new Error('Empty string is not a valid destination identifier'); } + const parts = s.split(':').map(x => x !== '*' ? x : undefined); + if (parts.length === 1) { return new DestinationPattern(parts[0]); } + if (parts.length === 2) { return new DestinationPattern(parts[0] || undefined, parts[1] || undefined); } + throw new Error(`Asset identifier must contain at most 2 ':'-separated parts, got '${s}'`); + } + + /** + * Identifies the asset, by source. + */ + public readonly assetId?: string; + + /** + * Identifies the destination where this asset will be published + */ + public readonly destinationId?: string; + + constructor(assetId?: string, destinationId?: string) { + this.assetId = assetId; + this.destinationId = destinationId; + } + + /** + * Whether or not this pattern matches the given identifier + */ + public matches(id: DestinationIdentifier) { + return (this.assetId === undefined || this.assetId === id.assetId) + && (this.destinationId === undefined || this.destinationId === id.destinationId); + } + + /** + * Return a string representation for this asset identifier + */ + public toString() { + return `${this.assetId ?? '*'}:${this.destinationId ?? 
'*'}`; + } +} + +/** + * Prefix box-drawing characters to make lines look like a hanging tree + */ +function prefixTreeChars(xs: string[], prefix = '') { + const ret = new Array(); + for (let i = 0; i < xs.length; i++) { + const isLast = i === xs.length - 1; + const boxChar = isLast ? '└' : '├'; + ret.push(`${prefix}${boxChar}${xs[i]}`); + } + return ret; +} diff --git a/packages/@aws-cdk/pipelines/lib/private/construct-internals.ts b/packages/@aws-cdk/pipelines/lib/private/construct-internals.ts new file mode 100644 index 0000000000000..13b1ac7c1dd0c --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/private/construct-internals.ts @@ -0,0 +1,37 @@ +/** + * Get access to construct internals that we need but got removed from the Stages PR. + */ +import { App, IConstruct, Stage } from '@aws-cdk/core'; +import * as cxapi from '@aws-cdk/cx-api'; +import * as path from 'path'; + +export function appOf(construct: IConstruct): App { + const root = construct.node.root; + + if (!App.isApp(root)) { + throw new Error(`Construct must be created under an App, but is not: ${construct.node.path}`); + } + + return root; +} + +export function assemblyBuilderOf(stage: Stage): cxapi.CloudAssemblyBuilder { + return (stage as any)._assemblyBuilder; +} + +/** + * Return the relative path from the app assembly to the scope's (nested) assembly + */ +export function embeddedAsmPath(scope: IConstruct) { + const appAsmRoot = assemblyBuilderOf(appOf(scope)).outdir; + const stage = Stage.of(scope) ?? appOf(scope); + const stageAsmRoot = assemblyBuilderOf(stage).outdir; + return path.relative(appAsmRoot, stageAsmRoot) || '.'; +} + +/** + * Determine the directory where the cloud assembly will be written, for use in a BuildSpec + */ +export function cloudAssemblyBuildSpecDir(scope: IConstruct) { + return assemblyBuilderOf(appOf(scope)).outdir; +} \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/lib/private/toposort.ts b/packages/@aws-cdk/pipelines/lib/private/toposort.ts new file mode 100644 index 0000000000000..8386a6d26bb82 --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/private/toposort.ts @@ -0,0 +1,47 @@ +export type KeyFunc = (x: T) => string; +export type DepFunc = (x: T) => string[]; + +/** + * Return a topological sort of all elements of xs, according to the given dependency functions + * + * Dependencies outside the referenced set are ignored. + * + * Not a stable sort, but in order to keep the order as stable as possible, we'll sort by key + * among elements of equal precedence. + * + * Returns tranches of elements of equal precedence. + */ +export function topologicalSort(xs: Iterable, keyFn: KeyFunc, depFn: DepFunc): T[][] { + const remaining = new Map>(); + for (const element of xs) { + const key = keyFn(element); + remaining.set(key, { key, element, dependencies: depFn(element) }); + } + + const ret = new Array(); + while (remaining.size > 0) { + // All elements with no more deps in the set can be ordered + const selectable = Array.from(remaining.values()).filter(e => e.dependencies.every(d => !remaining.has(d))); + + selectable.sort((a, b) => a.key < b.key ? -1 : b.key < a.key ? 
1 : 0); + + // If we didn't make any progress, we got stuck + if (selectable.length === 0) { + throw new Error(`Could not determine ordering between: ${Array.from(remaining.keys()).join(', ')}`); + } + + ret.push(selectable.map(s => s.element)); + + for (const selected of selectable) { + remaining.delete(selected.key); + } + } + + return ret; +} + +interface TopoElement { + key: string; + dependencies: string[]; + element: T; +} diff --git a/packages/@aws-cdk/pipelines/lib/stage.ts b/packages/@aws-cdk/pipelines/lib/stage.ts new file mode 100644 index 0000000000000..267bcb9543ef6 --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/stage.ts @@ -0,0 +1,386 @@ +import * as codepipeline from '@aws-cdk/aws-codepipeline'; +import * as cpactions from '@aws-cdk/aws-codepipeline-actions'; +import { Construct, Stage } from '@aws-cdk/core'; +import * as cxapi from '@aws-cdk/cx-api'; +import { AssetType, DeployCdkStackAction } from './actions'; +import { AssetManifestReader, DockerImageManifestEntry, FileManifestEntry } from './private/asset-manifest'; +import { topologicalSort } from './private/toposort'; + +/** + * Construction properties for a CdkStage + */ +export interface CdkStageProps { + /** + * Name of the stage that should be created + */ + readonly stageName: string; + + /** + * The underlying Pipeline Stage associated with thisCdkStage + */ + readonly pipelineStage: codepipeline.IStage; + + /** + * The CodePipeline Artifact with the Cloud Assembly + */ + readonly cloudAssemblyArtifact: codepipeline.Artifact; + + /** + * Features the Stage needs from its environment + */ + readonly host: IStageHost; +} + +/** + * Stage in a CdkPipeline + * + * You don't need to instantiate this class directly. Use + * `cdkPipeline.addStage()` instead. + */ +export class CdkStage extends Construct { + private _nextSequentialRunOrder = 1; // Must start at 1 eh + private _manualApprovalCounter = 1; + private readonly pipelineStage: codepipeline.IStage; + private readonly cloudAssemblyArtifact: codepipeline.Artifact; + private readonly stacksToDeploy = new Array(); + private readonly stageName: string; + private readonly host: IStageHost; + private _prepared = false; + + constructor(scope: Construct, id: string, props: CdkStageProps) { + super(scope, id); + + this.stageName = props.stageName; + this.pipelineStage = props.pipelineStage; + this.cloudAssemblyArtifact = props.cloudAssemblyArtifact; + this.host = props.host; + } + + /** + * Add all stacks in the application Stage to this stage + * + * The application construct should subclass `Stage` and can contain any + * number of `Stacks` inside it that may have dependency relationships + * on one another. + * + * All stacks in the application will be deployed in the appropriate order, + * and all assets found in the application will be added to the asset + * publishing stage. + */ + public addApplication(appStage: Stage, options: AddStageOptions = {}) { + const asm = appStage.synth(); + + const sortedTranches = topologicalSort(asm.stacks, + stack => stack.id, + stack => stack.dependencies.map(d => d.id)); + + for (const stacks of sortedTranches) { + const runOrder = this.nextSequentialRunOrder(2); // We need 2 actions + let executeRunOrder = runOrder + 1; + + // If we need to insert a manual approval action, then what's the executeRunOrder + // now is where we add a manual approval step, and we allocate 1 more runOrder + // for the execute. 
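+ // Without manual approvals the layout for this tranche is: prepare=N, execute=N+1.
+ // With manual approvals it becomes: prepare=N, approve=N+1, execute=N+2.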
+ if (options.manualApprovals) { + this.addManualApprovalAction({ runOrder: executeRunOrder }); + executeRunOrder = this.nextSequentialRunOrder(); + } + + // These don't have a dependency on each other, so can all be added in parallel + for (const stack of stacks) { + this.addStackArtifactDeployment(stack, { runOrder, executeRunOrder }); + } + } + } + + /** + * Add a deployment action based on a stack artifact + */ + public addStackArtifactDeployment(stackArtifact: cxapi.CloudFormationStackArtifact, options: AddStackOptions = {}) { + // Get all assets manifests and add the assets in 'em to the asset publishing stage. + this.publishAssetDependencies(stackArtifact); + + // Remember for later, see 'prepare()' + // We know that deploying a stack is going to take up 2 runorder slots later on. + const runOrder = options.runOrder ?? this.nextSequentialRunOrder(2); + const executeRunOrder = options.executeRunOrder ?? runOrder + 1; + this.stacksToDeploy.push({ + prepareRunOrder: runOrder, + executeRunOrder, + stackArtifact, + }); + + this.advanceRunOrderPast(runOrder); + this.advanceRunOrderPast(executeRunOrder); + } + + /** + * Add a manual approval action + * + * If you need more flexibility than what this method offers, + * use `addAction` with a `ManualApprovalAction`. + */ + public addManualApprovalAction(options: AddManualApprovalOptions = {}) { + let actionName = options.actionName; + if (!actionName) { + actionName = `ManualApproval${this._manualApprovalCounter > 1 ? this._manualApprovalCounter : ''}`; + this._manualApprovalCounter += 1; + } + + this.addActions(new cpactions.ManualApprovalAction({ + actionName, + runOrder: options.runOrder ?? this.nextSequentialRunOrder(), + })); + } + + /** + * Add one or more CodePipeline Actions + * + * You need to make sure it is created with the right runOrder. Call `nextSequentialRunOrder()` + * for every action to get actions to execute in sequence. + */ + public addActions(...actions: codepipeline.IAction[]) { + for (const action of actions) { + this.pipelineStage.addAction(action); + } + } + + /** + * Return the runOrder number necessary to run the next Action in sequence with the rest + * + * FIXME: This is here because Actions are immutable and can't be reordered + * after creation, nor is there a way to specify relative priorities, which + * is a limitation that we should take away in the base library. + */ + public nextSequentialRunOrder(count: number = 1): number { + const ret = this._nextSequentialRunOrder; + this._nextSequentialRunOrder += count; + return ret; + } + + /** + * Whether this Stage contains an action to deploy the given stack, identified by its artifact ID + */ + public deploysStack(artifactId: string) { + return this.stacksToDeploy.map(s => s.stackArtifact.id).includes(artifactId); + } + + /** + * Actually add all the DeployStack actions to the stage. + * + * We do this late because before we can render the actual DeployActions, + * we need to know whether or not we need to capture the stack outputs. + * + * FIXME: This is here because Actions are immutable and can't be reordered + * after creation, nor is there a way to specify relative priorities, which + * is a limitation that we should take away in the base library. + */ + protected prepare() { + // FIXME: Make sure this only gets run once. There seems to be an issue in the reconciliation + // loop that may trigger this more than once if it throws an error somewhere, and the exception + // that gets thrown here will then override the actual failure. 
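+ // Guard so that running prepare() more than once is harmless.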
+ if (this._prepared) { return; } + this._prepared = true; + + for (const { prepareRunOrder: runOrder, stackArtifact } of this.stacksToDeploy) { + const artifact = this.host.stackOutputArtifact(stackArtifact.id); + + this.pipelineStage.addAction(DeployCdkStackAction.fromStackArtifact(this, stackArtifact, { + baseActionName: this.simplifyStackName(stackArtifact.stackName), + cloudAssemblyInput: this.cloudAssemblyArtifact, + output: artifact, + outputFileName: artifact ? 'outputs.json' : undefined, + prepareRunOrder: runOrder, + })); + } + } + + /** + * Advance the runorder counter so that the next sequential number is higher than the given one + */ + private advanceRunOrderPast(lastUsed: number) { + this._nextSequentialRunOrder = Math.max(lastUsed + 1, this._nextSequentialRunOrder); + } + + /** + * Simplify the stack name by removing the `Stage-` prefix if it exists. + */ + private simplifyStackName(s: string) { + return stripPrefix(s, `${this.stageName}-`); + } + + /** + * Make sure all assets depended on by this stack are published in this pipeline + * + * Taking care to exclude the stack template itself -- it is being published + * as an asset because the CLI needs to know the asset publishing role when + * pushing the template to S3, but in the case of CodePipeline we always + * reference the template from the artifact bucket. + * + * (NOTE: this is only true for top-level stacks, not nested stacks. Nested + * Stack templates are always published as assets). + */ + private publishAssetDependencies(stackArtifact: cxapi.CloudFormationStackArtifact) { + const assetManifests = stackArtifact.dependencies.filter(isAssetManifest); + + for (const manifestArtifact of assetManifests) { + const manifest = AssetManifestReader.fromFile(manifestArtifact.file); + + for (const entry of manifest.entries) { + let assetType: AssetType; + if (entry instanceof DockerImageManifestEntry) { + assetType = AssetType.DOCKER_IMAGE; + } else if (entry instanceof FileManifestEntry) { + // Don't publishg the template for this stack + if (entry.source.packaging === 'file' && entry.source.path === stackArtifact.templateFile) { + continue; + } + + assetType = AssetType.FILE; + } else { + throw new Error(`Unrecognized asset type: ${entry.type}`); + } + + this.host.publishAsset({ + assetManifestPath: manifestArtifact.file, + assetId: entry.id.assetId, + assetSelector: entry.id.toString(), + assetType, + }); + } + } + } +} + +/** + * Additional options for adding a stack deployment + */ +export interface AddStackOptions { + /** + * Base runorder + * + * @default - Next sequential runorder + */ + readonly runOrder?: number; + + /** + * Base runorder + * + * @default - runOrder + 1 + */ + readonly executeRunOrder?: number; +} + +/** + * A single output of a Stack + */ +export class StackOutput { + /** + * The artifact and file the output is stored in + */ + public readonly artifactFile: codepipeline.ArtifactPath; + + /** + * The name of the output in the JSON object in the file + */ + public readonly outputName: string; + + /** + * Build a StackOutput from a known artifact and an output name + */ + constructor(artifactFile: codepipeline.ArtifactPath, outputName: string) { + this.artifactFile = artifactFile; + this.outputName = outputName; + } +} + +function stripPrefix(s: string, prefix: string) { + return s.startsWith(prefix) ? 
s.substr(prefix.length) : s; +} + +function isAssetManifest(s: cxapi.CloudArtifact): s is cxapi.AssetManifestArtifact { + return s instanceof cxapi.AssetManifestArtifact; +} + +/** + * Features that the Stage needs from its environment + */ +export interface IStageHost { + /** + * Make sure all the assets from the given manifest are published + */ + publishAsset(command: AssetPublishingCommand): void; + + /** + * Return the Artifact the given stack has to emit its outputs into, if any + */ + stackOutputArtifact(stackArtifactId: string): codepipeline.Artifact | undefined; +} + +/** + * Instructions to publish certain assets + */ +export interface AssetPublishingCommand { + /** + * Asset manifest path + */ + readonly assetManifestPath: string; + + /** + * Asset identifier + */ + readonly assetId: string; + + /** + * Asset selector to pass to `cdk-assets`. + */ + readonly assetSelector: string; + + /** + * Type of asset to publish + */ + readonly assetType: AssetType; +} + +/** + * Options for adding an application stage to a pipeline + */ +export interface AddStageOptions { + /** + * Add manual approvals before executing change sets + * + * This gives humans the opportunity to confirm the change set looks alright + * before deploying it. + * + * @default false + */ + readonly manualApprovals?: boolean; +} + +/** + * Options for addManualApproval + */ +export interface AddManualApprovalOptions { + /** + * The name of the manual approval action + * + * @default 'ManualApproval' with a rolling counter + */ + readonly actionName?: string; + + /** + * The runOrder for this action + * + * @default - The next sequential runOrder + */ + readonly runOrder?: number; +} + +/** + * Queued "deploy stack" command that is reified during prepare() + */ +interface DeployStackCommand { + prepareRunOrder: number; + executeRunOrder: number; + stackArtifact: cxapi.CloudFormationStackArtifact; +} \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/lib/synths/_util.ts b/packages/@aws-cdk/pipelines/lib/synths/_util.ts new file mode 100644 index 0000000000000..83f83bc802564 --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/synths/_util.ts @@ -0,0 +1,15 @@ +import * as codebuild from '@aws-cdk/aws-codebuild'; + +export function copyEnvironmentVariables(...names: string[]): Record { + const ret: Record = {}; + for (const name of names) { + if (process.env[name]) { + ret[name] = { value: process.env[name] }; + } + } + return ret; +} + +export function filterEmpty(xs: Array): string[] { + return xs.filter(x => x) as any; +} \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/lib/synths/index.ts b/packages/@aws-cdk/pipelines/lib/synths/index.ts new file mode 100644 index 0000000000000..4764f7d9647c6 --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/synths/index.ts @@ -0,0 +1 @@ +export * from './simple-synth-action'; \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/lib/synths/simple-synth-action.ts b/packages/@aws-cdk/pipelines/lib/synths/simple-synth-action.ts new file mode 100644 index 0000000000000..bebbed0f9f44d --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/synths/simple-synth-action.ts @@ -0,0 +1,353 @@ +import * as codebuild from '@aws-cdk/aws-codebuild'; +import * as codepipeline from '@aws-cdk/aws-codepipeline'; +import * as codepipeline_actions from '@aws-cdk/aws-codepipeline-actions'; +import * as events from '@aws-cdk/aws-events'; +import { Construct } from '@aws-cdk/core'; +import * as path from 'path'; +import { cloudAssemblyBuildSpecDir } from 
'../private/construct-internals'; +import { copyEnvironmentVariables, filterEmpty } from './_util'; + +/** + * Configuration options for a SimpleSynth + */ +export interface SimpleSynthOptions { + /** + * The source artifact of the CodePipeline + */ + readonly sourceArtifact: codepipeline.Artifact; + + /** + * The artifact where the CloudAssembly should be emitted + */ + readonly cloudAssemblyArtifact: codepipeline.Artifact; + + /** + * Environment variables to send into build + * + * @default - No additional environment variables + */ + readonly environmentVariables?: Record; + + /** + * Environment variables to copy over from parent env + * + * These are environment variables that are being used by the build. + * + * @default - No environment variables copied + */ + readonly copyEnvironmentVariables?: string[]; + + /** + * Name of the build action + * + * @default 'Synth' + */ + readonly actionName?: string; + + /** + * Name of the CodeBuild project + * + * @default - Automatically generated + */ + readonly projectName?: string; + + /** + * Build environment to use for CodeBuild job + * + * @default BuildEnvironment.LinuxBuildImage.STANDARD_1_0 + */ + readonly environment?: codebuild.BuildEnvironment; + + /** + * Directory inside the source where package.json and cdk.json are located + * + * @default - Repository root + */ + readonly subdirectory?: string; + + /** + * Produce additional output artifacts after the build based on the given directories + * + * Can be used to produce additional artifacts during the build step, + * separate from the cloud assembly, which can be used further on in the + * pipeline. + * + * Directories are evaluated with respect to `subdirectory`. + * + * @default - No additional artifacts generated + */ + readonly additionalArtifacts?: AdditionalArtifact[]; +} + +/** + * Construction props for SimpleSynthAction + */ +export interface SimpleSynthActionProps extends SimpleSynthOptions { + /** + * The synth command + */ + readonly synthCommand: string; + + /** + * The install command + * + * @default - No install required + */ + readonly installCommand?: string; + + /** + * The build command + * + * By default, we assume NPM projects are either written in JavaScript or are + * using `ts-node`, so don't need a build command. + * + * Otherwise, put the build command here, for example `npm run build`. + * + * @default - No build required + */ + readonly buildCommand?: string; +} + +/** + * Specification of an additional artifact to generate + */ +export interface AdditionalArtifact { + /** + * Directory to be packaged + */ + readonly directory: string; + + /** + * Artifact to represent the build directory in the pipeline + */ + readonly artifact: codepipeline.Artifact; +} + +/** + * A standard synth with a generated buildspec + */ +export class SimpleSynthAction implements codepipeline.IAction { + + /** + * Create a standard NPM synth action + * + * Uses `npm ci` to install dependencies and `npx cdk synth` to synthesize. + * + * If you need a build step, add `buildCommand: 'npm run build'`. + */ + public static standardNpmSynth(options: StandardNpmSynthOptions) { + return new SimpleSynthAction({ + ...options, + installCommand: options.installCommand ?? 'npm ci', + synthCommand: options.synthCommand ?? 'npx cdk synth', + }); + } + + /** + * Create a standard Yarn synth action + * + * Uses `yarn install --frozen-lockfile` to install dependencies and `npx cdk synth` to synthesize. + * + * If you need a build step, add `buildCommand: 'yarn build'`. 
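+ *
+ * For example (a sketch; `sourceArtifact` and `cloudAssemblyArtifact` are assumed to be
+ * existing CodePipeline artifacts):
+ *
+ *    SimpleSynthAction.standardYarnSynth({
+ *      sourceArtifact,
+ *      cloudAssemblyArtifact,
+ *      buildCommand: 'yarn build',
+ *    });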
+ */
+ public static standardYarnSynth(options: StandardYarnSynthOptions) {
+ return new SimpleSynthAction({
+ ...options,
+ installCommand: options.installCommand ?? 'yarn install --frozen-lockfile',
+ synthCommand: options.synthCommand ?? 'npx cdk synth',
+ });
+ }
+
+ private _action?: codepipeline_actions.CodeBuildAction;
+ private _actionProperties: codepipeline.ActionProperties;
+
+ constructor(private readonly props: SimpleSynthActionProps) {
+ // A number of actionProperties get read before bind() is even called (so before we
+ // have made the Project and can construct the actual CodeBuildAction)
+ //
+ // - actionName
+ // - resource
+ // - region
+ // - category
+ // - role
+ // - owner
+ this._actionProperties = {
+ actionName: props.actionName ?? 'Synth',
+ category: codepipeline.ActionCategory.BUILD,
+ provider: 'CodeBuild',
+ artifactBounds: { minInputs: 0, maxInputs: 5, minOutputs: 0, maxOutputs: 5 },
+ inputs: [props.sourceArtifact],
+ outputs: [props.cloudAssemblyArtifact, ...(props.additionalArtifacts ?? []).map(a => a.artifact)],
+ };
+
+ const addls = props.additionalArtifacts ?? [];
+ if (Object.keys(addls).length > 0) {
+ if (!props.cloudAssemblyArtifact.artifactName) {
+ throw new Error('You must give all output artifacts, including the \'cloudAssemblyArtifact\', names when using \'additionalArtifacts\'');
+ }
+ for (const addl of addls) {
+ if (!addl.artifact.artifactName) {
+ throw new Error('You must give all output artifacts passed to SimpleSynthAction names when using \'additionalArtifacts\'');
+ }
+ }
+ }
+ }
+
+ /**
+ * Exists to implement IAction
+ */
+ public get actionProperties(): codepipeline.ActionProperties {
+ return this._actionProperties;
+ }
+
+ /**
+ * Exists to implement IAction
+ */
+ public bind(scope: Construct, stage: codepipeline.IStage, options: codepipeline.ActionBindOptions): codepipeline.ActionConfig {
+ const buildCommand = this.props.buildCommand;
+ const synthCommand = this.props.synthCommand;
+ const installCommand = this.props.installCommand;
+
+ const project = new codebuild.PipelineProject(scope, 'CdkBuildProject', {
+ projectName: this.props.projectName,
+ environment: this.props.environment,
+ buildSpec: codebuild.BuildSpec.fromObject({
+ version: '0.2',
+ phases: {
+ pre_build: {
+ commands: filterEmpty([
+ this.props.subdirectory ? `cd ${this.props.subdirectory}` : '',
+ installCommand,
+ ]),
+ },
+ build: {
+ commands: filterEmpty([
+ buildCommand,
+ synthCommand,
+ ]),
+ },
+ },
+ artifacts: renderArtifacts(this),
+ }),
+ environmentVariables: {
+ ...copyEnvironmentVariables(...this.props.copyEnvironmentVariables || []),
+ ...this.props.environmentVariables,
+ },
+ });
+
+ this._action = new codepipeline_actions.CodeBuildAction({
+ actionName: this.actionProperties.actionName,
+ input: this.props.sourceArtifact,
+ outputs: [this.props.cloudAssemblyArtifact, ...(this.props.additionalArtifacts ?? []).map(a => a.artifact)],
+ project,
+ });
+ this._actionProperties = this._action.actionProperties;
+
+ return this._action.bind(scope, stage, options);
+
+ function renderArtifacts(self: SimpleSynthAction) {
+ // save the generated files in the output artifact
+ // This part of the buildspec has to look completely different depending on whether we're
+ // using secondary artifacts or not.
+
+ const cloudAsmArtifactSpec = {
+ 'base-directory': path.join(self.props.subdirectory ??
'.', cloudAssemblyBuildSpecDir(scope)), + 'files': '**/*', + }; + + if (self.props.additionalArtifacts) { + const secondary: Record = {}; + if (!self.props.cloudAssemblyArtifact.artifactName) { + throw new Error('When using additional output artifacts, you must also name the CloudAssembly artifact'); + } + secondary[self.props.cloudAssemblyArtifact.artifactName] = cloudAsmArtifactSpec; + self.props.additionalArtifacts.forEach((art) => { + if (!art.artifact.artifactName) { + throw new Error('You must give the output artifact a name'); + } + secondary[art.artifact.artifactName] = { + 'base-directory': path.join(self.props.subdirectory ?? '.', art.directory), + 'files': '**/*', + }; + }); + + return { 'secondary-artifacts': secondary }; + } + + return cloudAsmArtifactSpec; + } + } + + /** + * Exists to implement IAction + */ + public onStateChange(name: string, target?: events.IRuleTarget, options?: events.RuleProps): events.Rule { + if (!this._action) { + throw new Error('Need bind() first'); + } + + return this._action.onStateChange(name, target, options); + } +} + +/** + * Options for a convention-based synth using NPM + */ +export interface StandardNpmSynthOptions extends SimpleSynthOptions { + /** + * The install command + * + * @default 'npm ci' + */ + readonly installCommand?: string; + + /** + * The build command + * + * By default, we assume NPM projects are either written in JavaScript or are + * using `ts-node`, so don't need a build command. + * + * Otherwise, put the build command here, for example `npm run build`. + * + * @default - No build required + */ + readonly buildCommand?: string; + + /** + * The synth command + * + * @default 'npx cdk synth' + */ + readonly synthCommand?: string; +} + +/** + * Options for a convention-based synth using Yarn + */ +export interface StandardYarnSynthOptions extends SimpleSynthOptions { + /** + * The install command + * + * @default 'yarn install --frozen-lockfile' + */ + readonly installCommand?: string; + + /** + * The build command + * + * By default, we assume NPM projects are either written in JavaScript or are + * using `ts-node`, so don't need a build command. + * + * Otherwise, put the build command here, for example `npm run build`. 
+ * + * @default - No build required + */ + readonly buildCommand?: string; + + /** + * The synth command + * + * @default 'npx cdk synth' + */ + readonly synthCommand?: string; +} \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/lib/validation/_files.ts b/packages/@aws-cdk/pipelines/lib/validation/_files.ts new file mode 100644 index 0000000000000..2f2bbf7be35ea --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/validation/_files.ts @@ -0,0 +1,97 @@ +import * as codepipeline from '@aws-cdk/aws-codepipeline'; +import { IGrantable } from '@aws-cdk/aws-iam'; +import * as s3assets from '@aws-cdk/aws-s3-assets'; +import { Construct } from '@aws-cdk/core'; + +/** + * Additional files to use in a shell script + */ +export abstract class Files { + /** + * Use the files from a CodePipeline artifact + */ + public static fromArtifact(artifact: codepipeline.Artifact): Files { + if (!artifact) { + // Typechecking may mess up + throw new Error('Files.fromArtifact(): input artifact is required, got undefined'); + } + + return { + bind: () => ({ artifact }), + grantRead: () => { /* Not necessary */ }, + }; + } + + /** + * Create a new asset to bundle up the files in a directory on disk + */ + public static fromDirectory(directoryPath: string): Files { + let realFiles: Files; + return { + bind(scope: Construct) { + realFiles = Files.fromAsset(new s3assets.Asset(scope, directoryPath, { + path: directoryPath, + })); + + return realFiles.bind(scope); + }, + grantRead(grantee: IGrantable) { + if (!realFiles) { + throw new Error('bind() must be called first'); + } + realFiles.grantRead(grantee); + }, + }; + } + + /** + * Use an existing asset as a file source + */ + public static fromAsset(asset: s3assets.Asset): Files { + return { + bind: () => ({ + commands: [ + `echo "Downloading additional files from ${asset.s3ObjectUrl}"`, + `aws s3 cp ${asset.s3ObjectUrl} /tmp/dl.zip`, + 'unzip /tmp/dl.zip -d .', + ], + }), + grantRead: (grantee) => asset.grantRead(grantee), + }; + } + + protected constructor() { + } + + /** + * Bind the Files to a usage location + */ + public abstract bind(scope: Construct): FilesConfig; + + /** + * Grant read permissions to the file set to the given grantable + * + * Must be called after bind(). 
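+ *
+ * A sketch of the intended call order (the directory, scope and grantee are illustrative):
+ *
+ *    const files = Files.fromDirectory('./integ-tests');
+ *    const config = files.bind(scope);   // for fromDirectory(), this creates the backing S3 asset
+ *    files.grantRead(someRole);          // only valid after bind()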
+ */ + + public abstract grantRead(grantee: IGrantable): void; +} + +/** + * Config for a Files source + */ +export interface FilesConfig { + /** + * CodePipeline artifact to add to the set of input artifacts for the project + * + * @default - No artifact + */ + readonly artifact?: codepipeline.Artifact; + + /** + * Commands to add to the set of commands for the project + * + * @default - No commands + */ + readonly commands?: string[]; +} diff --git a/packages/@aws-cdk/pipelines/lib/validation/index.ts b/packages/@aws-cdk/pipelines/lib/validation/index.ts new file mode 100644 index 0000000000000..f2751fc92af49 --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/validation/index.ts @@ -0,0 +1 @@ +export * from './shell-script-action'; \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/lib/validation/shell-script-action.ts b/packages/@aws-cdk/pipelines/lib/validation/shell-script-action.ts new file mode 100644 index 0000000000000..301e641cb15fa --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/validation/shell-script-action.ts @@ -0,0 +1,183 @@ +import * as codebuild from '@aws-cdk/aws-codebuild'; +import * as codepipeline from '@aws-cdk/aws-codepipeline'; +import * as codepipeline_actions from '@aws-cdk/aws-codepipeline-actions'; +import * as events from '@aws-cdk/aws-events'; +import { Construct } from '@aws-cdk/core'; +import { StackOutput } from '../stage'; + +/** + * Properties for ShellScriptValidation + */ +export interface ShellScriptActionProps { + /** + * Name of the validation action in the pipeline + */ + readonly actionName: string; + + /** + * Stack outputs to make available as environment variables + * + * @default - No outputs used + */ + readonly useOutputs?: Record; + + /** + * Commands to run + */ + readonly commands: string[]; + + /** + * Bash options to set at the start of the script + * + * @default '-eu' (errexit and nounset) + */ + readonly bashOptions?: string; + + /** + * Additional artifacts to use as input for the CodeBuild project + * + * You can use these files to load more complex test sets into the + * shellscript build environment. + * + * The files artifact given here will be unpacked into the current + * working directory, the other ones will be unpacked into directories + * which are available through the environment variables + * $CODEBUILD_SRC_DIR_. + * + * The CodeBuild job must have at least one input artifact, so you + * must provide either at least one additional artifact here or one + * stack output using `useOutput`. + * + * @default - No additional artifacts + */ + readonly additionalArtifacts?: codepipeline.Artifact[]; + + /** + * RunOrder for this action + * + * Use this to sequence the shell script after the deployments. + * + * The default value is 100 so you don't have to supply the value if you just + * want to run this after the application stacks have been deployed, and you + * don't have more than 100 stacks. 
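+ *
+ * A sketch of a typical use (the stack output and command are illustrative):
+ *
+ *    new ShellScriptAction({
+ *      actionName: 'SmokeTest',
+ *      useOutputs: {
+ *        ENDPOINT_URL: pipeline.stackOutput(myStack.urlOutput),
+ *      },
+ *      commands: ['curl -Ssf $ENDPOINT_URL'],
+ *      // runOrder is not given, so it defaults to 100 and runs after the deployments
+ *    });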
+ * + * @default 100 + */ + readonly runOrder?: number; +} + +/** + * Validate a revision using shell commands + */ +export class ShellScriptAction implements codepipeline.IAction { + private _project?: codebuild.IProject; + + private _action?: codepipeline_actions.CodeBuildAction; + private _actionProperties: codepipeline.ActionProperties; + + constructor(private readonly props: ShellScriptActionProps) { + // A number of actionProperties get read before bind() is even called (so before we + // have made the Project and can construct the actual CodeBuildAction) + // + // - actionName + // - resource + // - region + // - category + // - role + // - owner + this._actionProperties = { + actionName: props.actionName, + category: codepipeline.ActionCategory.BUILD, + provider: 'CodeBuild', + artifactBounds: { minInputs: 0, maxInputs: 5, minOutputs: 0, maxOutputs: 5 }, + inputs: [], + outputs: [], + }; + + if (Object.keys(props.useOutputs ?? {}).length + (props.additionalArtifacts ?? []).length === 0) { + throw new Error('You must supply either \'useOutputs\' or \'additionalArtifacts\', since a CodeBuild Action must always have at least one input artifact.'); + } + } + + /** + * Exists to implement IAction + */ + public get actionProperties(): codepipeline.ActionProperties { + return this._actionProperties; + } + + /** + * Exists to implement IAction + */ + public bind(scope: Construct, stage: codepipeline.IStage, options: codepipeline.ActionBindOptions): codepipeline.ActionConfig { + const inputs = new Array(); + inputs.push(...this.props.additionalArtifacts ?? []); + + const envVarCommands = new Array(); + + const bashOptions = this.props.bashOptions ?? '-eu'; + if (bashOptions) { + envVarCommands.push(`set ${bashOptions}`); + } + for (const [varName, output] of Object.entries(this.props.useOutputs ?? {})) { + const outputArtifact = output.artifactFile; + + // Add the artifact to the list of inputs, if it's not in there already. Determine + // the location where CodeBuild is going to stick it based on whether it's the first (primary) + // input or an 'extra input', then parse. + let artifactIndex = inputs.findIndex(a => a.artifactName === outputArtifact.artifact.artifactName); + if (artifactIndex === -1) { + artifactIndex = inputs.push(outputArtifact.artifact) - 1; + } + const dirEnv = artifactIndex === 0 ? 'CODEBUILD_SRC_DIR' : `CODEBUILD_SRC_DIR_${outputArtifact.artifact.artifactName}`; + envVarCommands.push(`export ${varName}="$(node -pe 'require(process.env.${dirEnv} + "/${outputArtifact.fileName}")["${output.outputName}"]')"`); + } + + this._project = new codebuild.PipelineProject(scope, 'Project', { + buildSpec: codebuild.BuildSpec.fromObject({ + version: '0.2', + phases: { + build: { + commands: [ + ...envVarCommands, + ...this.props.commands, + ], + }, + }, + }), + }); + + this._action = new codepipeline_actions.CodeBuildAction({ + actionName: this.props.actionName, + input: inputs[0], + extraInputs: inputs.slice(1), + runOrder: this.props.runOrder ?? 
100, + project: this._project, + }); + // Replace the placeholder actionProperties at the last minute + this._actionProperties = this._action.actionProperties; + + return this._action.bind(scope, stage, options); + } + + /** + * Project generated to run the shell script in + */ + public get project(): codebuild.IProject { + if (!this._project) { + throw new Error('Project becomes available after ShellScriptAction has been bound to a stage'); + } + return this._project; + } + + /** + * Exists to implement IAction + */ + public onStateChange(name: string, target?: events.IRuleTarget, options?: events.RuleProps): events.Rule { + if (!this._action) { + throw new Error('Need bind() first'); + } + + return this._action.onStateChange(name, target, options); + } +} diff --git a/packages/@aws-cdk/pipelines/package.json b/packages/@aws-cdk/pipelines/package.json new file mode 100644 index 0000000000000..b6a5f4a51acfa --- /dev/null +++ b/packages/@aws-cdk/pipelines/package.json @@ -0,0 +1,119 @@ +{ + "name": "@aws-cdk/pipelines", + "version": "0.0.0", + "description": "Continuous Delivery of CDK applications", + "main": "lib/index.js", + "types": "lib/index.d.ts", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-cdk.git", + "directory": "packages/@aws-cdk/pipelines" + }, + "bin": {}, + "scripts": { + "build": "cdk-build", + "watch": "cdk-watch", + "lint": "cdk-lint", + "test": "cdk-test", + "integ": "cdk-integ", + "pkglint": "pkglint -f", + "package": "cdk-package", + "awslint": "cdk-awslint", + "build+test+package": "npm run build+test && npm run package", + "build+test": "npm run build && npm test", + "compat": "cdk-compat" + }, + "author": { + "name": "Amazon Web Services", + "url": "https://aws.amazon.com", + "organization": true + }, + "devDependencies": { + "@aws-cdk/assert": "0.0.0", + "@types/nodeunit": "^0.0.31", + "cdk-build-tools": "0.0.0", + "cdk-integ-tools": "0.0.0", + "cfn2ts": "0.0.0", + "nodeunit": "^0.11.3", + "pkglint": "0.0.0", + "@aws-cdk/aws-s3": "0.0.0", + "@aws-cdk/aws-ecr-assets": "0.0.0" + }, + "peerDependencies": { + "constructs": "^3.0.2", + "@aws-cdk/core": "0.0.0", + "@aws-cdk/aws-codebuild": "0.0.0", + "@aws-cdk/aws-codepipeline": "0.0.0", + "@aws-cdk/aws-codepipeline-actions": "0.0.0", + "@aws-cdk/aws-events": "0.0.0", + "@aws-cdk/aws-iam": "0.0.0", + "@aws-cdk/cloud-assembly-schema": "0.0.0", + "@aws-cdk/aws-s3-assets": "0.0.0", + "@aws-cdk/cx-api": "0.0.0", + "@aws-cdk/aws-cloudformation": "0.0.0" + }, + "dependencies": { + "constructs": "^3.0.2", + "@aws-cdk/core": "0.0.0", + "@aws-cdk/aws-codebuild": "0.0.0", + "@aws-cdk/aws-codepipeline": "0.0.0", + "@aws-cdk/aws-codepipeline-actions": "0.0.0", + "@aws-cdk/cloud-assembly-schema": "0.0.0", + "@aws-cdk/aws-events": "0.0.0", + "@aws-cdk/aws-iam": "0.0.0", + "@aws-cdk/aws-s3-assets": "0.0.0", + "@aws-cdk/cx-api": "0.0.0", + "@aws-cdk/aws-cloudformation": "0.0.0" + }, + "bundledDependencies": [], + "keywords": [ + "aws", + "cdk", + "constructs", + "pipelines", + "cicd", + "continuous", + "delivery" + ], + "engines": { + "node": ">= 10.13.0 <13 || >=13.7.0" + }, + "license": "Apache-2.0", + "stability": "experimental", + "maturity": "developer-preview", + "jsii": { + "outdir": "dist", + "targets": { + "java": { + "package": "software.amazon.awscdk.pipelines", + "maven": { + "groupId": "software.amazon.awscdk", + "artifactId": "cdk-pipelines" + } + }, + "dotnet": { + "namespace": "Amazon.CDK.Pipelines", + "packageId": "Amazon.CDK.Pipelines", + "signAssembly": true, + "assemblyOriginatorKeyFile": 
"../../key.snk", + "iconUrl": "https://raw.githubusercontent.com/aws/aws-cdk/master/logo/default-256-dark.png" + }, + "python": { + "distName": "aws-cdk.pipelines", + "module": "aws_cdk.pipelines" + } + } + }, + "awscdkio": { + "announce": false + }, + "awslint": { + "exclude": [ + "events-generic:@aws-cdk/pipelines.PublishAssetsAction", + "events-method-signature:@aws-cdk/pipelines.PublishAssetsAction.onStateChange", + "events-generic:@aws-cdk/pipelines.UpdatePipelineAction", + "events-method-signature:@aws-cdk/pipelines.UpdatePipelineAction.onStateChange" + ] + }, + "homepage": "https://github.com/aws/aws-cdk" +} diff --git a/packages/@aws-cdk/pipelines/test/builds.test.ts b/packages/@aws-cdk/pipelines/test/builds.test.ts new file mode 100644 index 0000000000000..95f1fddf11ad9 --- /dev/null +++ b/packages/@aws-cdk/pipelines/test/builds.test.ts @@ -0,0 +1,142 @@ +import { arrayWith, deepObjectLike } from '@aws-cdk/assert'; +import '@aws-cdk/assert/jest'; +import * as codepipeline from '@aws-cdk/aws-codepipeline'; +import { Stack } from '@aws-cdk/core'; +import * as cdkp from '../lib'; +import { encodedJson } from './testmatchers'; +import { PIPELINE_ENV, TestApp, TestGitHubNpmPipeline } from './testutil'; + +let app: TestApp; +let pipelineStack: Stack; +let sourceArtifact: codepipeline.Artifact; +let cloudAssemblyArtifact: codepipeline.Artifact; + +beforeEach(() => { + app = new TestApp({ outdir: 'testcdk.out' }); + pipelineStack = new Stack(app, 'PipelineStack', { env: PIPELINE_ENV }); + sourceArtifact = new codepipeline.Artifact(); + cloudAssemblyArtifact = new codepipeline.Artifact('CloudAsm'); +}); + +afterEach(() => { + app.cleanup(); +}); + +test.each([['npm'], ['yarn']])('%s build automatically determines artifact base-directory', (npmYarn) => { + // WHEN + new TestGitHubNpmPipeline(pipelineStack, 'Cdk', { + sourceArtifact, + cloudAssemblyArtifact, + synthAction: npmYarnBuild(npmYarn)({ sourceArtifact, cloudAssemblyArtifact }), + }); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodeBuild::Project', { + Source: { + BuildSpec: encodedJson(deepObjectLike({ + artifacts: { + 'base-directory': 'testcdk.out', + }, + })), + }, + }); +}); + +test.each([['npm'], ['yarn']])('%s build respects subdirectory', (npmYarn) => { + // WHEN + new TestGitHubNpmPipeline(pipelineStack, 'Cdk', { + sourceArtifact, + cloudAssemblyArtifact, + synthAction: npmYarnBuild(npmYarn)({ + sourceArtifact, + cloudAssemblyArtifact, + subdirectory: 'subdir', + }), + }); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodeBuild::Project', { + Source: { + BuildSpec: encodedJson(deepObjectLike({ + phases: { + pre_build: { + commands: arrayWith('cd subdir'), + }, + }, + artifacts: { + 'base-directory': 'subdir/testcdk.out', + }, + })), + }, + }); +}); + +test.each([['npm'], ['yarn']])('%s assumes no build step by default', (npmYarn) => { + // WHEN + new TestGitHubNpmPipeline(pipelineStack, 'Cdk', { + sourceArtifact, + cloudAssemblyArtifact, + synthAction: npmYarnBuild(npmYarn)({ sourceArtifact, cloudAssemblyArtifact }), + }); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodeBuild::Project', { + Source: { + BuildSpec: encodedJson(deepObjectLike({ + phases: { + build: { + commands: ['npx cdk synth'], + }, + }, + })), + }, + }); +}); + +test('Standard (NPM) synth can output additional artifacts', () => { + // WHEN + sourceArtifact = new codepipeline.Artifact(); + cloudAssemblyArtifact = new codepipeline.Artifact('CloudAsm'); + + const addlArtifact = new 
codepipeline.Artifact('IntegTest'); + new TestGitHubNpmPipeline(pipelineStack, 'Cdk', { + sourceArtifact, + cloudAssemblyArtifact, + synthAction: cdkp.SimpleSynthAction.standardNpmSynth({ + sourceArtifact, + cloudAssemblyArtifact, + additionalArtifacts: [ + { + artifact: addlArtifact, + directory: 'test', + }, + ], + }), + }); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodeBuild::Project', { + Source: { + BuildSpec: encodedJson(deepObjectLike({ + artifacts: { + 'secondary-artifacts': { + CloudAsm: { + 'base-directory': 'testcdk.out', + 'files': '**/*', + }, + IntegTest: { + 'base-directory': 'test', + 'files': '**/*', + }, + }, + }, + })), + }, + }); +}); + +function npmYarnBuild(npmYarn: string) { + if (npmYarn === 'npm') { return cdkp.SimpleSynthAction.standardNpmSynth; } + if (npmYarn === 'yarn') { return cdkp.SimpleSynthAction.standardYarnSynth; } + throw new Error(`Expecting npm|yarn: ${npmYarn}`); +} \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/test/cross-environment-infra.test.ts b/packages/@aws-cdk/pipelines/test/cross-environment-infra.test.ts new file mode 100644 index 0000000000000..b891574d6e3a6 --- /dev/null +++ b/packages/@aws-cdk/pipelines/test/cross-environment-infra.test.ts @@ -0,0 +1,76 @@ +import { arrayWith, objectLike } from '@aws-cdk/assert'; +import '@aws-cdk/assert/jest'; +import { Construct, Stack, Stage, StageProps } from '@aws-cdk/core'; +import * as cdkp from '../lib'; +import { stringLike } from './testmatchers'; +import { BucketStack, PIPELINE_ENV, TestApp, TestGitHubNpmPipeline } from './testutil'; + +let app: TestApp; +let pipelineStack: Stack; +let pipeline: cdkp.CdkPipeline; + +beforeEach(() => { + app = new TestApp(); + pipelineStack = new Stack(app, 'PipelineStack', { env: PIPELINE_ENV }); + pipeline = new TestGitHubNpmPipeline(pipelineStack, 'Cdk'); +}); + +afterEach(() => { + app.cleanup(); +}); + +test('in a cross-account/cross-region setup, artifact bucket can be read by deploy role', () => { + // WHEN + pipeline.addApplicationStage(new TestApplication(app, 'MyApp', { + env: { account: '321elsewhere', region: 'us-elsewhere' }, + })); + + // THEN + app.synth(); + const supportStack = app.node.findAll().filter(Stack.isStack).find(s => s.stackName === 'PipelineStack-support-us-elsewhere'); + expect(supportStack).not.toBeUndefined(); + + expect(supportStack).toHaveResourceLike('AWS::S3::BucketPolicy', { + PolicyDocument: { + Statement: arrayWith(objectLike({ + Action: arrayWith('s3:GetObject*', 's3:GetBucket*', 's3:List*'), + Principal: { + AWS: { + 'Fn::Sub': stringLike('*-deploy-role-*'), + }, + }, + })), + }, + }); +}); + +test('in a cross-account/same-region setup, artifact bucket can be read by deploy role', () => { + // WHEN + pipeline.addApplicationStage(new TestApplication(app, 'MyApp', { + env: { account: '321elsewhere', region: PIPELINE_ENV.region }, + })); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::S3::BucketPolicy', { + PolicyDocument: { + Statement: arrayWith(objectLike({ + Action: ['s3:GetObject*', 's3:GetBucket*', 's3:List*'], + Principal: { + AWS: { + 'Fn::Sub': stringLike('*-deploy-role-*'), + }, + }, + })), + }, + }); +}); + +/** + * Our application + */ +class TestApplication extends Stage { + constructor(scope: Construct, id: string, props: StageProps) { + super(scope, id, props); + new BucketStack(this, 'Stack'); + } +} \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/test/integ.pipeline.expected.json 
b/packages/@aws-cdk/pipelines/test/integ.pipeline.expected.json new file mode 100644 index 0000000000000..1e212d090d82a --- /dev/null +++ b/packages/@aws-cdk/pipelines/test/integ.pipeline.expected.json @@ -0,0 +1,1316 @@ +{ + "Resources": { + "PipelineUpdatePipelineSelfMutationRole57E559E8": { + "Type": "AWS::IAM::Role", + "Properties": { + "AssumeRolePolicyDocument": { + "Statement": [ + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "Service": "codebuild.amazonaws.com" + } + } + ], + "Version": "2012-10-17" + } + } + }, + "PipelineUpdatePipelineSelfMutationRoleDefaultPolicyA225DA4E": { + "Type": "AWS::IAM::Policy", + "Properties": { + "PolicyDocument": { + "Statement": [ + { + "Action": [ + "logs:CreateLogGroup", + "logs:CreateLogStream", + "logs:PutLogEvents" + ], + "Effect": "Allow", + "Resource": [ + { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":logs:test-region:12345678:log-group:/aws/codebuild/", + { + "Ref": "PipelineUpdatePipelineSelfMutationDAA41400" + } + ] + ] + }, + { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":logs:test-region:12345678:log-group:/aws/codebuild/", + { + "Ref": "PipelineUpdatePipelineSelfMutationDAA41400" + }, + ":*" + ] + ] + } + ] + }, + { + "Action": [ + "codebuild:CreateReportGroup", + "codebuild:CreateReport", + "codebuild:UpdateReport", + "codebuild:BatchPutTestCases" + ], + "Effect": "Allow", + "Resource": { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":codebuild:test-region:12345678:report-group/", + { + "Ref": "PipelineUpdatePipelineSelfMutationDAA41400" + }, + "-*" + ] + ] + } + }, + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Resource": [ + "arn:*:iam::*:role/*-deploy-role-*", + "arn:*:iam::*:role/*-publishing-role-*" + ] + }, + { + "Action": "cloudformation:DescribeStacks", + "Effect": "Allow", + "Resource": "*" + }, + { + "Action": "s3:ListBucket", + "Effect": "Allow", + "Resource": "*" + }, + { + "Action": [ + "s3:GetObject*", + "s3:GetBucket*", + "s3:List*" + ], + "Effect": "Allow", + "Resource": [ + { + "Fn::GetAtt": [ + "PipelineArtifactsBucketAEA9A052", + "Arn" + ] + }, + { + "Fn::Join": [ + "", + [ + { + "Fn::GetAtt": [ + "PipelineArtifactsBucketAEA9A052", + "Arn" + ] + }, + "/*" + ] + ] + } + ] + }, + { + "Action": [ + "kms:Decrypt", + "kms:DescribeKey" + ], + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "PipelineArtifactsBucketEncryptionKeyF5BF0670", + "Arn" + ] + } + }, + { + "Action": [ + "kms:Decrypt", + "kms:Encrypt", + "kms:ReEncrypt*", + "kms:GenerateDataKey*" + ], + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "PipelineArtifactsBucketEncryptionKeyF5BF0670", + "Arn" + ] + } + } + ], + "Version": "2012-10-17" + }, + "PolicyName": "PipelineUpdatePipelineSelfMutationRoleDefaultPolicyA225DA4E", + "Roles": [ + { + "Ref": "PipelineUpdatePipelineSelfMutationRole57E559E8" + } + ] + } + }, + "PipelineUpdatePipelineSelfMutationDAA41400": { + "Type": "AWS::CodeBuild::Project", + "Properties": { + "Artifacts": { + "Type": "CODEPIPELINE" + }, + "Environment": { + "ComputeType": "BUILD_GENERAL1_SMALL", + "Image": "aws/codebuild/standard:1.0", + "PrivilegedMode": false, + "Type": "LINUX_CONTAINER" + }, + "ServiceRole": { + "Fn::GetAtt": [ + "PipelineUpdatePipelineSelfMutationRole57E559E8", + "Arn" + ] + }, + "Source": { + "BuildSpec": "{\n \"version\": \"0.2\",\n \"phases\": {\n \"install\": {\n \"commands\": \"npm install -g aws-cdk\"\n },\n \"build\": {\n \"commands\": [\n \"cdk -a . 
deploy PipelineStack --require-approval=never --verbose\"\n ]\n }\n }\n}", + "Type": "CODEPIPELINE" + }, + "EncryptionKey": { + "Fn::GetAtt": [ + "PipelineArtifactsBucketEncryptionKeyF5BF0670", + "Arn" + ] + } + } + }, + "PipelineArtifactsBucketEncryptionKeyF5BF0670": { + "Type": "AWS::KMS::Key", + "Properties": { + "KeyPolicy": { + "Statement": [ + { + "Action": [ + "kms:Create*", + "kms:Describe*", + "kms:Enable*", + "kms:List*", + "kms:Put*", + "kms:Update*", + "kms:Revoke*", + "kms:Disable*", + "kms:Get*", + "kms:Delete*", + "kms:ScheduleKeyDeletion", + "kms:CancelKeyDeletion", + "kms:GenerateDataKey", + "kms:TagResource", + "kms:UntagResource" + ], + "Effect": "Allow", + "Principal": { + "AWS": { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":iam::12345678:root" + ] + ] + } + }, + "Resource": "*" + }, + { + "Action": [ + "kms:Decrypt", + "kms:DescribeKey", + "kms:Encrypt", + "kms:ReEncrypt*", + "kms:GenerateDataKey*" + ], + "Effect": "Allow", + "Principal": { + "AWS": { + "Fn::GetAtt": [ + "PipelineRoleB27FAA37", + "Arn" + ] + } + }, + "Resource": "*" + }, + { + "Action": [ + "kms:Decrypt", + "kms:DescribeKey", + "kms:Encrypt", + "kms:ReEncrypt*", + "kms:GenerateDataKey*" + ], + "Effect": "Allow", + "Principal": { + "AWS": { + "Fn::GetAtt": [ + "PipelineBuildSynthCdkBuildProjectRole231EEA2A", + "Arn" + ] + } + }, + "Resource": "*" + }, + { + "Action": [ + "kms:Decrypt", + "kms:Encrypt", + "kms:ReEncrypt*", + "kms:GenerateDataKey*" + ], + "Effect": "Allow", + "Principal": { + "AWS": { + "Fn::GetAtt": [ + "PipelineBuildSynthCdkBuildProjectRole231EEA2A", + "Arn" + ] + } + }, + "Resource": "*" + }, + { + "Action": [ + "kms:Decrypt", + "kms:DescribeKey" + ], + "Effect": "Allow", + "Principal": { + "AWS": { + "Fn::GetAtt": [ + "PipelineUpdatePipelineSelfMutationRole57E559E8", + "Arn" + ] + } + }, + "Resource": "*" + }, + { + "Action": [ + "kms:Decrypt", + "kms:Encrypt", + "kms:ReEncrypt*", + "kms:GenerateDataKey*" + ], + "Effect": "Allow", + "Principal": { + "AWS": { + "Fn::GetAtt": [ + "PipelineUpdatePipelineSelfMutationRole57E559E8", + "Arn" + ] + } + }, + "Resource": "*" + }, + { + "Action": [ + "kms:Decrypt", + "kms:DescribeKey" + ], + "Effect": "Allow", + "Principal": { + "AWS": { + "Fn::GetAtt": [ + "PipelinePreProdUseSourceProjectRole69B20A71", + "Arn" + ] + } + }, + "Resource": "*" + }, + { + "Action": [ + "kms:Decrypt", + "kms:Encrypt", + "kms:ReEncrypt*", + "kms:GenerateDataKey*" + ], + "Effect": "Allow", + "Principal": { + "AWS": { + "Fn::GetAtt": [ + "PipelinePreProdUseSourceProjectRole69B20A71", + "Arn" + ] + } + }, + "Resource": "*" + }, + { + "Action": [ + "kms:Decrypt", + "kms:DescribeKey" + ], + "Effect": "Allow", + "Principal": { + "AWS": { + "Fn::Sub": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-deploy-role-${AWS::AccountId}-${AWS::Region}" + } + }, + "Resource": "*" + } + ], + "Version": "2012-10-17" + } + }, + "UpdateReplacePolicy": "Delete", + "DeletionPolicy": "Delete" + }, + "PipelineArtifactsBucketAEA9A052": { + "Type": "AWS::S3::Bucket", + "Properties": { + "BucketEncryption": { + "ServerSideEncryptionConfiguration": [ + { + "ServerSideEncryptionByDefault": { + "KMSMasterKeyID": { + "Fn::GetAtt": [ + "PipelineArtifactsBucketEncryptionKeyF5BF0670", + "Arn" + ] + }, + "SSEAlgorithm": "aws:kms" + } + } + ] + }, + "PublicAccessBlockConfiguration": { + "BlockPublicAcls": true, + "BlockPublicPolicy": true, + "IgnorePublicAcls": true, + "RestrictPublicBuckets": true + } + }, + "UpdateReplacePolicy": "Retain", + 
"DeletionPolicy": "Retain" + }, + "PipelineArtifactsBucketEncryptionKeyAlias94A07392": { + "Type": "AWS::KMS::Alias", + "Properties": { + "AliasName": "alias/codepipeline-pipelinestackpipelinee95eedaa", + "TargetKeyId": { + "Fn::GetAtt": [ + "PipelineArtifactsBucketEncryptionKeyF5BF0670", + "Arn" + ] + } + }, + "UpdateReplacePolicy": "Delete", + "DeletionPolicy": "Delete" + }, + "PipelineRoleB27FAA37": { + "Type": "AWS::IAM::Role", + "Properties": { + "AssumeRolePolicyDocument": { + "Statement": [ + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "Service": "codepipeline.amazonaws.com" + } + } + ], + "Version": "2012-10-17" + } + } + }, + "PipelineRoleDefaultPolicy7BDC1ABB": { + "Type": "AWS::IAM::Policy", + "Properties": { + "PolicyDocument": { + "Statement": [ + { + "Action": [ + "s3:GetObject*", + "s3:GetBucket*", + "s3:List*", + "s3:DeleteObject*", + "s3:PutObject*", + "s3:Abort*" + ], + "Effect": "Allow", + "Resource": [ + { + "Fn::GetAtt": [ + "PipelineArtifactsBucketAEA9A052", + "Arn" + ] + }, + { + "Fn::Join": [ + "", + [ + { + "Fn::GetAtt": [ + "PipelineArtifactsBucketAEA9A052", + "Arn" + ] + }, + "/*" + ] + ] + } + ] + }, + { + "Action": [ + "kms:Decrypt", + "kms:DescribeKey", + "kms:Encrypt", + "kms:ReEncrypt*", + "kms:GenerateDataKey*" + ], + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "PipelineArtifactsBucketEncryptionKeyF5BF0670", + "Arn" + ] + } + }, + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "PipelineBuildSynthCodePipelineActionRole4E7A6C97", + "Arn" + ] + } + }, + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "PipelineUpdatePipelineSelfMutateCodePipelineActionRoleD6D4E5CF", + "Arn" + ] + } + }, + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "PipelinePreProdUseSourceCodePipelineActionRoleA2043BDA", + "Arn" + ] + } + }, + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Resource": { + "Fn::Sub": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-deploy-role-${AWS::AccountId}-${AWS::Region}" + } + } + ], + "Version": "2012-10-17" + }, + "PolicyName": "PipelineRoleDefaultPolicy7BDC1ABB", + "Roles": [ + { + "Ref": "PipelineRoleB27FAA37" + } + ] + } + }, + "Pipeline9850B417": { + "Type": "AWS::CodePipeline::Pipeline", + "Properties": { + "RoleArn": { + "Fn::GetAtt": [ + "PipelineRoleB27FAA37", + "Arn" + ] + }, + "Stages": [ + { + "Actions": [ + { + "ActionTypeId": { + "Category": "Source", + "Owner": "ThirdParty", + "Provider": "GitHub", + "Version": "1" + }, + "Configuration": { + "Owner": "OWNER", + "Repo": "REPO", + "Branch": "master", + "OAuthToken": "not-a-secret", + "PollForSourceChanges": true + }, + "Name": "GitHub", + "OutputArtifacts": [ + { + "Name": "Artifact_Source_GitHub" + } + ], + "RunOrder": 1 + } + ], + "Name": "Source" + }, + { + "Actions": [ + { + "ActionTypeId": { + "Category": "Build", + "Owner": "AWS", + "Provider": "CodeBuild", + "Version": "1" + }, + "Configuration": { + "ProjectName": { + "Ref": "PipelineBuildSynthCdkBuildProject6BEFA8E6" + } + }, + "InputArtifacts": [ + { + "Name": "Artifact_Source_GitHub" + } + ], + "Name": "Synth", + "OutputArtifacts": [ + { + "Name": "CloudAsm" + }, + { + "Name": "IntegTests" + } + ], + "RoleArn": { + "Fn::GetAtt": [ + "PipelineBuildSynthCodePipelineActionRole4E7A6C97", + "Arn" + ] + }, + "RunOrder": 1 + } + ], + "Name": "Build" + }, + { + "Actions": [ + { + "ActionTypeId": { + "Category": "Build", + "Owner": "AWS", + 
"Provider": "CodeBuild", + "Version": "1" + }, + "Configuration": { + "ProjectName": { + "Ref": "PipelineUpdatePipelineSelfMutationDAA41400" + } + }, + "InputArtifacts": [ + { + "Name": "CloudAsm" + } + ], + "Name": "SelfMutate", + "RoleArn": { + "Fn::GetAtt": [ + "PipelineUpdatePipelineSelfMutateCodePipelineActionRoleD6D4E5CF", + "Arn" + ] + }, + "RunOrder": 1 + } + ], + "Name": "UpdatePipeline" + }, + { + "Actions": [ + { + "ActionTypeId": { + "Category": "Build", + "Owner": "AWS", + "Provider": "CodeBuild", + "Version": "1" + }, + "Configuration": { + "ProjectName": { + "Ref": "PipelinePreProdUseSourceProject2E711EB4" + } + }, + "InputArtifacts": [ + { + "Name": "Artifact_Source_GitHub" + } + ], + "Name": "UseSource", + "RoleArn": { + "Fn::GetAtt": [ + "PipelinePreProdUseSourceCodePipelineActionRoleA2043BDA", + "Arn" + ] + }, + "RunOrder": 100 + }, + { + "ActionTypeId": { + "Category": "Deploy", + "Owner": "AWS", + "Provider": "CloudFormation", + "Version": "1" + }, + "Configuration": { + "StackName": "PreProd-Stack", + "Capabilities": "CAPABILITY_NAMED_IAM,CAPABILITY_AUTO_EXPAND", + "RoleArn": { + "Fn::Sub": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-cfn-exec-role-${AWS::AccountId}-${AWS::Region}" + }, + "ActionMode": "CHANGE_SET_REPLACE", + "ChangeSetName": "PipelineChange", + "TemplatePath": "CloudAsm::assembly-PipelineStack-PreProd/PipelineStackPreProdStack65A0AD1F.template.json" + }, + "InputArtifacts": [ + { + "Name": "CloudAsm" + } + ], + "Name": "Stack.Prepare", + "RoleArn": { + "Fn::Sub": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-deploy-role-${AWS::AccountId}-${AWS::Region}" + }, + "RunOrder": 1 + }, + { + "ActionTypeId": { + "Category": "Deploy", + "Owner": "AWS", + "Provider": "CloudFormation", + "Version": "1" + }, + "Configuration": { + "StackName": "PreProd-Stack", + "ActionMode": "CHANGE_SET_EXECUTE", + "ChangeSetName": "PipelineChange" + }, + "Name": "Stack.Deploy", + "RoleArn": { + "Fn::Sub": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-deploy-role-${AWS::AccountId}-${AWS::Region}" + }, + "RunOrder": 2 + } + ], + "Name": "PreProd" + } + ], + "ArtifactStore": { + "EncryptionKey": { + "Id": { + "Fn::GetAtt": [ + "PipelineArtifactsBucketEncryptionKeyF5BF0670", + "Arn" + ] + }, + "Type": "KMS" + }, + "Location": { + "Ref": "PipelineArtifactsBucketAEA9A052" + }, + "Type": "S3" + }, + "RestartExecutionOnUpdate": true + }, + "DependsOn": [ + "PipelineRoleDefaultPolicy7BDC1ABB", + "PipelineRoleB27FAA37" + ] + }, + "PipelineBuildSynthCodePipelineActionRole4E7A6C97": { + "Type": "AWS::IAM::Role", + "Properties": { + "AssumeRolePolicyDocument": { + "Statement": [ + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "AWS": { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":iam::12345678:root" + ] + ] + } + } + } + ], + "Version": "2012-10-17" + } + } + }, + "PipelineBuildSynthCodePipelineActionRoleDefaultPolicy92C90290": { + "Type": "AWS::IAM::Policy", + "Properties": { + "PolicyDocument": { + "Statement": [ + { + "Action": [ + "codebuild:BatchGetBuilds", + "codebuild:StartBuild", + "codebuild:StopBuild" + ], + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "PipelineBuildSynthCdkBuildProject6BEFA8E6", + "Arn" + ] + } + } + ], + "Version": "2012-10-17" + }, + "PolicyName": "PipelineBuildSynthCodePipelineActionRoleDefaultPolicy92C90290", + "Roles": [ + { + "Ref": "PipelineBuildSynthCodePipelineActionRole4E7A6C97" + } + ] + } + }, + 
"PipelineBuildSynthCdkBuildProjectRole231EEA2A": { + "Type": "AWS::IAM::Role", + "Properties": { + "AssumeRolePolicyDocument": { + "Statement": [ + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "Service": "codebuild.amazonaws.com" + } + } + ], + "Version": "2012-10-17" + } + } + }, + "PipelineBuildSynthCdkBuildProjectRoleDefaultPolicyFB6C941C": { + "Type": "AWS::IAM::Policy", + "Properties": { + "PolicyDocument": { + "Statement": [ + { + "Action": [ + "logs:CreateLogGroup", + "logs:CreateLogStream", + "logs:PutLogEvents" + ], + "Effect": "Allow", + "Resource": [ + { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":logs:test-region:12345678:log-group:/aws/codebuild/", + { + "Ref": "PipelineBuildSynthCdkBuildProject6BEFA8E6" + } + ] + ] + }, + { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":logs:test-region:12345678:log-group:/aws/codebuild/", + { + "Ref": "PipelineBuildSynthCdkBuildProject6BEFA8E6" + }, + ":*" + ] + ] + } + ] + }, + { + "Action": [ + "codebuild:CreateReportGroup", + "codebuild:CreateReport", + "codebuild:UpdateReport", + "codebuild:BatchPutTestCases" + ], + "Effect": "Allow", + "Resource": { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":codebuild:test-region:12345678:report-group/", + { + "Ref": "PipelineBuildSynthCdkBuildProject6BEFA8E6" + }, + "-*" + ] + ] + } + }, + { + "Action": [ + "s3:GetObject*", + "s3:GetBucket*", + "s3:List*", + "s3:DeleteObject*", + "s3:PutObject*", + "s3:Abort*" + ], + "Effect": "Allow", + "Resource": [ + { + "Fn::GetAtt": [ + "PipelineArtifactsBucketAEA9A052", + "Arn" + ] + }, + { + "Fn::Join": [ + "", + [ + { + "Fn::GetAtt": [ + "PipelineArtifactsBucketAEA9A052", + "Arn" + ] + }, + "/*" + ] + ] + } + ] + }, + { + "Action": [ + "kms:Decrypt", + "kms:DescribeKey", + "kms:Encrypt", + "kms:ReEncrypt*", + "kms:GenerateDataKey*" + ], + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "PipelineArtifactsBucketEncryptionKeyF5BF0670", + "Arn" + ] + } + }, + { + "Action": [ + "kms:Decrypt", + "kms:Encrypt", + "kms:ReEncrypt*", + "kms:GenerateDataKey*" + ], + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "PipelineArtifactsBucketEncryptionKeyF5BF0670", + "Arn" + ] + } + } + ], + "Version": "2012-10-17" + }, + "PolicyName": "PipelineBuildSynthCdkBuildProjectRoleDefaultPolicyFB6C941C", + "Roles": [ + { + "Ref": "PipelineBuildSynthCdkBuildProjectRole231EEA2A" + } + ] + } + }, + "PipelineBuildSynthCdkBuildProject6BEFA8E6": { + "Type": "AWS::CodeBuild::Project", + "Properties": { + "Artifacts": { + "Type": "CODEPIPELINE" + }, + "Environment": { + "ComputeType": "BUILD_GENERAL1_SMALL", + "Image": "aws/codebuild/standard:1.0", + "PrivilegedMode": false, + "Type": "LINUX_CONTAINER" + }, + "ServiceRole": { + "Fn::GetAtt": [ + "PipelineBuildSynthCdkBuildProjectRole231EEA2A", + "Arn" + ] + }, + "Source": { + "BuildSpec": "{\n \"version\": \"0.2\",\n \"phases\": {\n \"pre_build\": {\n \"commands\": [\n \"npm ci\"\n ]\n },\n \"build\": {\n \"commands\": [\n \"npx cdk synth\"\n ]\n }\n },\n \"artifacts\": {\n \"secondary-artifacts\": {\n \"CloudAsm\": {\n \"base-directory\": \"cdk.out\",\n \"files\": \"**/*\"\n },\n \"IntegTests\": {\n \"base-directory\": \"test\",\n \"files\": \"**/*\"\n }\n }\n }\n}", + "Type": "CODEPIPELINE" + }, + "EncryptionKey": { + "Fn::GetAtt": [ + "PipelineArtifactsBucketEncryptionKeyF5BF0670", + "Arn" + ] + }, + "Name": "MyServicePipeline-synth" + } + }, + "PipelineUpdatePipelineSelfMutateCodePipelineActionRoleD6D4E5CF": 
{ + "Type": "AWS::IAM::Role", + "Properties": { + "AssumeRolePolicyDocument": { + "Statement": [ + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "AWS": { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":iam::12345678:root" + ] + ] + } + } + } + ], + "Version": "2012-10-17" + } + } + }, + "PipelineUpdatePipelineSelfMutateCodePipelineActionRoleDefaultPolicyE626265B": { + "Type": "AWS::IAM::Policy", + "Properties": { + "PolicyDocument": { + "Statement": [ + { + "Action": [ + "codebuild:BatchGetBuilds", + "codebuild:StartBuild", + "codebuild:StopBuild" + ], + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "PipelineUpdatePipelineSelfMutationDAA41400", + "Arn" + ] + } + } + ], + "Version": "2012-10-17" + }, + "PolicyName": "PipelineUpdatePipelineSelfMutateCodePipelineActionRoleDefaultPolicyE626265B", + "Roles": [ + { + "Ref": "PipelineUpdatePipelineSelfMutateCodePipelineActionRoleD6D4E5CF" + } + ] + } + }, + "PipelinePreProdUseSourceCodePipelineActionRoleA2043BDA": { + "Type": "AWS::IAM::Role", + "Properties": { + "AssumeRolePolicyDocument": { + "Statement": [ + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "AWS": { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":iam::12345678:root" + ] + ] + } + } + } + ], + "Version": "2012-10-17" + } + } + }, + "PipelinePreProdUseSourceCodePipelineActionRoleDefaultPolicy9BE325AD": { + "Type": "AWS::IAM::Policy", + "Properties": { + "PolicyDocument": { + "Statement": [ + { + "Action": [ + "codebuild:BatchGetBuilds", + "codebuild:StartBuild", + "codebuild:StopBuild" + ], + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "PipelinePreProdUseSourceProject2E711EB4", + "Arn" + ] + } + } + ], + "Version": "2012-10-17" + }, + "PolicyName": "PipelinePreProdUseSourceCodePipelineActionRoleDefaultPolicy9BE325AD", + "Roles": [ + { + "Ref": "PipelinePreProdUseSourceCodePipelineActionRoleA2043BDA" + } + ] + } + }, + "PipelinePreProdUseSourceProjectRole69B20A71": { + "Type": "AWS::IAM::Role", + "Properties": { + "AssumeRolePolicyDocument": { + "Statement": [ + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "Service": "codebuild.amazonaws.com" + } + } + ], + "Version": "2012-10-17" + } + } + }, + "PipelinePreProdUseSourceProjectRoleDefaultPolicy50F68DF3": { + "Type": "AWS::IAM::Policy", + "Properties": { + "PolicyDocument": { + "Statement": [ + { + "Action": [ + "logs:CreateLogGroup", + "logs:CreateLogStream", + "logs:PutLogEvents" + ], + "Effect": "Allow", + "Resource": [ + { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":logs:test-region:12345678:log-group:/aws/codebuild/", + { + "Ref": "PipelinePreProdUseSourceProject2E711EB4" + } + ] + ] + }, + { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":logs:test-region:12345678:log-group:/aws/codebuild/", + { + "Ref": "PipelinePreProdUseSourceProject2E711EB4" + }, + ":*" + ] + ] + } + ] + }, + { + "Action": [ + "codebuild:CreateReportGroup", + "codebuild:CreateReport", + "codebuild:UpdateReport", + "codebuild:BatchPutTestCases" + ], + "Effect": "Allow", + "Resource": { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":codebuild:test-region:12345678:report-group/", + { + "Ref": "PipelinePreProdUseSourceProject2E711EB4" + }, + "-*" + ] + ] + } + }, + { + "Action": [ + "s3:GetObject*", + "s3:GetBucket*", + "s3:List*" + ], + "Effect": "Allow", + "Resource": [ + { + "Fn::GetAtt": [ + 
"PipelineArtifactsBucketAEA9A052", + "Arn" + ] + }, + { + "Fn::Join": [ + "", + [ + { + "Fn::GetAtt": [ + "PipelineArtifactsBucketAEA9A052", + "Arn" + ] + }, + "/*" + ] + ] + } + ] + }, + { + "Action": [ + "kms:Decrypt", + "kms:DescribeKey" + ], + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "PipelineArtifactsBucketEncryptionKeyF5BF0670", + "Arn" + ] + } + }, + { + "Action": [ + "kms:Decrypt", + "kms:Encrypt", + "kms:ReEncrypt*", + "kms:GenerateDataKey*" + ], + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "PipelineArtifactsBucketEncryptionKeyF5BF0670", + "Arn" + ] + } + } + ], + "Version": "2012-10-17" + }, + "PolicyName": "PipelinePreProdUseSourceProjectRoleDefaultPolicy50F68DF3", + "Roles": [ + { + "Ref": "PipelinePreProdUseSourceProjectRole69B20A71" + } + ] + } + }, + "PipelinePreProdUseSourceProject2E711EB4": { + "Type": "AWS::CodeBuild::Project", + "Properties": { + "Artifacts": { + "Type": "CODEPIPELINE" + }, + "Environment": { + "ComputeType": "BUILD_GENERAL1_SMALL", + "Image": "aws/codebuild/standard:1.0", + "PrivilegedMode": false, + "Type": "LINUX_CONTAINER" + }, + "ServiceRole": { + "Fn::GetAtt": [ + "PipelinePreProdUseSourceProjectRole69B20A71", + "Arn" + ] + }, + "Source": { + "BuildSpec": "{\n \"version\": \"0.2\",\n \"phases\": {\n \"build\": {\n \"commands\": [\n \"set -eu\",\n \"cat README.md\"\n ]\n }\n }\n}", + "Type": "CODEPIPELINE" + }, + "EncryptionKey": { + "Fn::GetAtt": [ + "PipelineArtifactsBucketEncryptionKeyF5BF0670", + "Arn" + ] + } + } + } + } +} \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/test/integ.pipeline.ts b/packages/@aws-cdk/pipelines/test/integ.pipeline.ts new file mode 100644 index 0000000000000..f0a4da9dde073 --- /dev/null +++ b/packages/@aws-cdk/pipelines/test/integ.pipeline.ts @@ -0,0 +1,80 @@ +/// !cdk-integ PipelineStack +import * as codepipeline from '@aws-cdk/aws-codepipeline'; +import * as codepipeline_actions from '@aws-cdk/aws-codepipeline-actions'; +import { App, CfnResource, Construct, SecretValue, Stack, StackProps, Stage, StageProps } from '@aws-cdk/core'; +import * as cdkp from '../lib'; + +class MyStage extends Stage { + constructor(scope: Construct, id: string, props?: StageProps) { + super(scope, id, props); + + const stack = new Stack(this, 'Stack'); + new CfnResource(stack, 'Resource', { + type: 'AWS::Test::SomeResource', + }); + } +} + +/** + * The stack that defines the application pipeline + */ +class CdkpipelinesDemoPipelineStack extends Stack { + constructor(scope: Construct, id: string, props?: StackProps) { + super(scope, id, props); + + const sourceArtifact = new codepipeline.Artifact(); + const cloudAssemblyArtifact = new codepipeline.Artifact('CloudAsm'); + const integTestArtifact = new codepipeline.Artifact('IntegTests'); + + const pipeline = new cdkp.CdkPipeline(this, 'Pipeline', { + cloudAssemblyArtifact, + + // Where the source can be found + sourceAction: new codepipeline_actions.GitHubSourceAction({ + actionName: 'GitHub', + output: sourceArtifact, + oauthToken: SecretValue.plainText('not-a-secret'), + owner: 'OWNER', + repo: 'REPO', + trigger: codepipeline_actions.GitHubTrigger.POLL, + }), + + // How it will be built + synthAction: cdkp.SimpleSynthAction.standardNpmSynth({ + sourceArtifact, + cloudAssemblyArtifact, + projectName: 'MyServicePipeline-synth', + additionalArtifacts: [ + { + directory: 'test', + artifact: integTestArtifact, + }, + ], + }), + }); + + // This is where we add the application stages + // ... 
+ const stage = pipeline.addApplicationStage(new MyStage(this, 'PreProd')); + stage.addActions( + new cdkp.ShellScriptAction({ + actionName: 'UseSource', + commands: [ + // Comes from source + 'cat README.md', + ], + additionalArtifacts: [sourceArtifact], + }), + ); + } +} + +const app = new App({ + context: { + '@aws-cdk/core:newStyleStackSynthesis': 'true', + }, +}); +new CdkpipelinesDemoPipelineStack(app, 'PipelineStack', { + env: { account: process.env.CDK_DEFAULT_ACCOUNT, region: process.env.CDK_DEFAULT_REGION }, +}); +app.synth(); diff --git a/packages/@aws-cdk/pipelines/test/pipeline-assets.test.ts b/packages/@aws-cdk/pipelines/test/pipeline-assets.test.ts new file mode 100644 index 0000000000000..c1fb0b79b13f9 --- /dev/null +++ b/packages/@aws-cdk/pipelines/test/pipeline-assets.test.ts @@ -0,0 +1,216 @@ +import { arrayWith, deepObjectLike, objectLike } from '@aws-cdk/assert'; +import '@aws-cdk/assert/jest'; +import * as ecr_assets from '@aws-cdk/aws-ecr-assets'; +import * as s3_assets from '@aws-cdk/aws-s3-assets'; +import { Construct, Stack, Stage, StageProps } from '@aws-cdk/core'; +import * as path from 'path'; +import * as cdkp from '../lib'; +import { encodedJson, notMatching, stringLike } from './testmatchers'; +import { BucketStack, PIPELINE_ENV, TestApp, TestGitHubNpmPipeline } from './testutil'; + +const FILE_ASSET_SOURCE_HASH = '8289faf53c7da377bb2b90615999171adef5e1d8f6b88810e5fef75e6ca09ba5'; + +let app: TestApp; +let pipelineStack: Stack; +let pipeline: cdkp.CdkPipeline; + +beforeEach(() => { + app = new TestApp(); + pipelineStack = new Stack(app, 'PipelineStack', { env: PIPELINE_ENV }); + pipeline = new TestGitHubNpmPipeline(pipelineStack, 'Cdk'); +}); + +afterEach(() => { + app.cleanup(); +}); + +test('no assets stage if the application has no assets', () => { + // WHEN + pipeline.addApplicationStage(new PlainStackApp(app, 'App')); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: notMatching(arrayWith(objectLike({ + Name: 'Assets', + }))), + }); +}); + +test('command line properly locates assets in subassembly', () => { + // WHEN + pipeline.addApplicationStage(new FileAssetApp(app, 'FileAssetApp')); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodeBuild::Project', { + Source: { + BuildSpec: encodedJson(deepObjectLike({ + phases: { + build: { + // tslint:disable-next-line: max-line-length + commands: arrayWith(`cdk-assets --path "assembly-FileAssetApp/FileAssetAppStackEADD68C5.assets.json" --verbose publish "${FILE_ASSET_SOURCE_HASH}:current_account-current_region"`), + }, + }, + })), + }, + }); +}); + +test('multiple assets are published in parallel', () => { + // WHEN + pipeline.addApplicationStage(new TwoFileAssetsApp(app, 'FileAssetApp')); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: arrayWith({ + Name: 'Assets', + Actions: [ + objectLike({ RunOrder: 1 }), + objectLike({ RunOrder: 1 }), + ], + }), + }); +}); + +test('assets are also published when using the lower-level addStackArtifactDeployment', () => { + // GIVEN + const asm = new FileAssetApp(app, 'FileAssetApp').synth(); + + // WHEN + pipeline.addStage('SomeStage').addStackArtifactDeployment(asm.getStackByName('FileAssetApp-Stack')); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: arrayWith({ + Name: 'Assets', + Actions: [ + objectLike({ + Name: FILE_ASSET_SOURCE_HASH, + RunOrder: 1, + }), + ], + }), + }); +}); + +test('file image asset 
publishers do not use privilegedmode, have right AssumeRole', () => { + // WHEN + pipeline.addApplicationStage(new FileAssetApp(app, 'FileAssetApp')); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodeBuild::Project', { + Source: { + BuildSpec: encodedJson(deepObjectLike({ + phases: { + build: { + commands: arrayWith(stringLike('cdk-assets *')), + }, + }, + })), + }, + Environment: objectLike({ + PrivilegedMode: false, + }), + }); + + expect(pipelineStack).toHaveResourceLike('AWS::IAM::Policy', { + PolicyDocument: { + Statement: arrayWith({ + Action: 'sts:AssumeRole', + Effect: 'Allow', + Resource: 'arn:*:iam::*:role/*-file-publishing-role-*', + }), + }, + }); +}); + +test('docker image asset publishers use privilegedmode, have right AssumeRole', () => { + // WHEN + pipeline.addApplicationStage(new DockerAssetApp(app, 'DockerAssetApp')); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodeBuild::Project', { + Source: { + BuildSpec: encodedJson(deepObjectLike({ + phases: { + build: { + commands: arrayWith(stringLike('cdk-assets *')), + }, + }, + })), + }, + Environment: objectLike({ + PrivilegedMode: true, + }), + }); + expect(pipelineStack).toHaveResourceLike('AWS::IAM::Policy', { + PolicyDocument: { + Statement: arrayWith({ + Action: 'sts:AssumeRole', + Effect: 'Allow', + Resource: 'arn:*:iam::*:role/*-image-publishing-role-*', + }), + }, + }); +}); + +test('can control fix/CLI version used in pipeline selfupdate', () => { + // WHEN + const stack2 = new Stack(app, 'Stack2', { env: PIPELINE_ENV }); + const pipeline2 = new TestGitHubNpmPipeline(stack2, 'Cdk2', { + cdkCliVersion: '1.2.3', + }); + pipeline2.addApplicationStage(new FileAssetApp(stack2, 'FileAssetApp')); + + // THEN + expect(stack2).toHaveResourceLike('AWS::CodeBuild::Project', { + Source: { + BuildSpec: encodedJson(deepObjectLike({ + phases: { + install: { + commands: 'npm install -g cdk-assets@1.2.3', + }, + }, + })), + }, + }); +}); + +class PlainStackApp extends Stage { + constructor(scope: Construct, id: string, props?: StageProps) { + super(scope, id, props); + new BucketStack(this, 'Stack'); + } +} + +class FileAssetApp extends Stage { + constructor(scope: Construct, id: string, props?: StageProps) { + super(scope, id, props); + const stack = new Stack(this, 'Stack'); + new s3_assets.Asset(stack, 'Asset', { + path: path.join(__dirname, 'test-file-asset.txt'), + }); + } +} + +class TwoFileAssetsApp extends Stage { + constructor(scope: Construct, id: string, props?: StageProps) { + super(scope, id, props); + const stack = new Stack(this, 'Stack'); + new s3_assets.Asset(stack, 'Asset1', { + path: path.join(__dirname, 'test-file-asset.txt'), + }); + new s3_assets.Asset(stack, 'Asset2', { + path: path.join(__dirname, 'test-file-asset-two.txt'), + }); + } +} + +class DockerAssetApp extends Stage { + constructor(scope: Construct, id: string, props?: StageProps) { + super(scope, id, props); + const stack = new Stack(this, 'Stack'); + new ecr_assets.DockerImageAsset(stack, 'Asset', { + directory: path.join(__dirname, 'test-docker-asset'), + }); + } +} \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/test/pipeline.test.ts b/packages/@aws-cdk/pipelines/test/pipeline.test.ts new file mode 100644 index 0000000000000..2196726c91180 --- /dev/null +++ b/packages/@aws-cdk/pipelines/test/pipeline.test.ts @@ -0,0 +1,286 @@ +import { arrayWith, deepObjectLike, objectLike } from '@aws-cdk/assert'; +import '@aws-cdk/assert/jest'; +import { Construct, Stack, Stage, StageProps } from 
'@aws-cdk/core'; +import * as cdkp from '../lib'; +import { anything, encodedJson, stringLike } from './testmatchers'; +import { BucketStack, PIPELINE_ENV, stackTemplate, TestApp, TestGitHubNpmPipeline } from './testutil'; + +let app: TestApp; +let pipelineStack: Stack; +let pipeline: cdkp.CdkPipeline; + +beforeEach(() => { + app = new TestApp(); + pipelineStack = new Stack(app, 'PipelineStack', { env: PIPELINE_ENV }); + pipeline = new TestGitHubNpmPipeline(pipelineStack, 'Cdk'); +}); + +afterEach(() => { + app.cleanup(); +}); + +test('references stack template in subassembly', () => { + // WHEN + pipeline.addApplicationStage(new OneStackApp(app, 'App')); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: arrayWith({ + Name: 'App', + Actions: arrayWith( + objectLike({ + Name: 'Stack.Prepare', + InputArtifacts: [objectLike({})], + Configuration: objectLike({ + StackName: 'App-Stack', + TemplatePath: stringLike('*::assembly-App/*.template.json'), + }), + }), + ), + }), + }); +}); + +// tslint:disable: max-line-length +test('action has right settings for same-env deployment', () => { + // WHEN + pipeline.addApplicationStage(new OneStackApp(app, 'Same')); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: arrayWith({ + Name: 'Same', + Actions: [ + objectLike({ + Name: 'Stack.Prepare', + RoleArn: { 'Fn::Sub': 'arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-deploy-role-${AWS::AccountId}-${AWS::Region}' }, + Configuration: objectLike({ + StackName: 'Same-Stack', + RoleArn: { 'Fn::Sub': 'arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-cfn-exec-role-${AWS::AccountId}-${AWS::Region}' }, + }), + }), + objectLike({ + Name: 'Stack.Deploy', + RoleArn: { 'Fn::Sub': 'arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-deploy-role-${AWS::AccountId}-${AWS::Region}' }, + Configuration: objectLike({ + StackName: 'Same-Stack', + }), + }), + ], + }), + }); +}); + +test('action has right settings for cross-account deployment', () => { + // WHEN + pipeline.addApplicationStage(new OneStackApp(app, 'CrossAccount', { env: { account: 'you' }})); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: arrayWith({ + Name: 'CrossAccount', + Actions: [ + objectLike({ + Name: 'Stack.Prepare', + RoleArn: { 'Fn::Sub': 'arn:${AWS::Partition}:iam::you:role/cdk-hnb659fds-deploy-role-you-${AWS::Region}' }, + Configuration: objectLike({ + StackName: 'CrossAccount-Stack', + RoleArn: { 'Fn::Sub': 'arn:${AWS::Partition}:iam::you:role/cdk-hnb659fds-cfn-exec-role-you-${AWS::Region}' }, + }), + }), + objectLike({ + Name: 'Stack.Deploy', + RoleArn: { 'Fn::Sub': 'arn:${AWS::Partition}:iam::you:role/cdk-hnb659fds-deploy-role-you-${AWS::Region}' }, + Configuration: objectLike({ + StackName: 'CrossAccount-Stack', + }), + }), + ], + }), + }); +}); + +test('action has right settings for cross-region deployment', () => { + // WHEN + pipeline.addApplicationStage(new OneStackApp(app, 'CrossRegion', { env: { region: 'elsewhere' }})); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: arrayWith({ + Name: 'CrossRegion', + Actions: [ + objectLike({ + Name: 'Stack.Prepare', + RoleArn: { 'Fn::Sub': 'arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-deploy-role-${AWS::AccountId}-elsewhere' }, + Region: 'elsewhere', + Configuration: objectLike({ + StackName: 'CrossRegion-Stack', + RoleArn: { 'Fn::Sub': 
'arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-cfn-exec-role-${AWS::AccountId}-elsewhere' }, + }), + }), + objectLike({ + Name: 'Stack.Deploy', + RoleArn: { 'Fn::Sub': 'arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-deploy-role-${AWS::AccountId}-elsewhere' }, + Region: 'elsewhere', + Configuration: objectLike({ + StackName: 'CrossRegion-Stack', + }), + }), + ], + }), + }); +}); + +test('action has right settings for cross-account/cross-region deployment', () => { + // WHEN + pipeline.addApplicationStage(new OneStackApp(app, 'CrossBoth', { env: { account: 'you', region: 'elsewhere' }})); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: arrayWith({ + Name: 'CrossBoth', + Actions: [ + objectLike({ + Name: 'Stack.Prepare', + RoleArn: { 'Fn::Sub': 'arn:${AWS::Partition}:iam::you:role/cdk-hnb659fds-deploy-role-you-elsewhere' }, + Region: 'elsewhere', + Configuration: objectLike({ + StackName: 'CrossBoth-Stack', + RoleArn: { 'Fn::Sub': 'arn:${AWS::Partition}:iam::you:role/cdk-hnb659fds-cfn-exec-role-you-elsewhere' }, + }), + }), + objectLike({ + Name: 'Stack.Deploy', + RoleArn: { 'Fn::Sub': 'arn:${AWS::Partition}:iam::you:role/cdk-hnb659fds-deploy-role-you-elsewhere' }, + Region: 'elsewhere', + Configuration: objectLike({ + StackName: 'CrossBoth-Stack', + }), + }), + ], + }), + }); +}); + +// tslint:enable: max-line-length + +test('pipeline has self-mutation stage', () => { + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: arrayWith({ + Name: 'UpdatePipeline', + Actions: [ + objectLike({ + Name: 'SelfMutate', + Configuration: objectLike({ + ProjectName: { Ref: anything() }, + }), + }), + ], + }), + }); + + expect(pipelineStack).toHaveResourceLike('AWS::CodeBuild::Project', { + Source: { + BuildSpec: encodedJson(deepObjectLike({ + phases: { + install: { + commands: 'npm install -g aws-cdk', + }, + build: { + commands: arrayWith('cdk -a . 
deploy PipelineStack --require-approval=never --verbose'), + }, + }, + })), + Type: 'CODEPIPELINE', + }, + }); +}); + +test('selfmutation stage correctly identifies nested assembly of pipeline stack', () => { + const pipelineStage = new Stage(app, 'PipelineStage'); + const nestedPipelineStack = new Stack(pipelineStage, 'PipelineStack', { env: PIPELINE_ENV }); + new TestGitHubNpmPipeline(nestedPipelineStack, 'Cdk'); + + // THEN + expect(stackTemplate(nestedPipelineStack)).toHaveResourceLike('AWS::CodeBuild::Project', { + Source: { + BuildSpec: encodedJson(deepObjectLike({ + phases: { + build: { + commands: arrayWith('cdk -a assembly-PipelineStage deploy PipelineStage-PipelineStack --require-approval=never --verbose'), + }, + }, + })), + }, + }); +}); + +test('overridden stack names are respected', () => { + // WHEN + pipeline.addApplicationStage(new OneStackAppWithCustomName(app, 'App1')); + pipeline.addApplicationStage(new OneStackAppWithCustomName(app, 'App2')); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: arrayWith( + { + Name: 'App1', + Actions: arrayWith(objectLike({ + Name: 'MyFancyStack.Prepare', + Configuration: objectLike({ + StackName: 'MyFancyStack', + }), + })), + }, + { + Name: 'App2', + Actions: arrayWith(objectLike({ + Name: 'MyFancyStack.Prepare', + Configuration: objectLike({ + StackName: 'MyFancyStack', + }), + })), + }, + ), + }); +}); + +test('can control fix/CLI version used in pipeline selfupdate', () => { + // WHEN + const stack2 = new Stack(app, 'Stack2', { env: PIPELINE_ENV }); + new TestGitHubNpmPipeline(stack2, 'Cdk2', { + pipelineName: 'vpipe', + cdkCliVersion: '1.2.3', + }); + + // THEN + expect(stack2).toHaveResourceLike('AWS::CodeBuild::Project', { + Name: 'vpipe-selfupdate', + Source: { + BuildSpec: encodedJson(deepObjectLike({ + phases: { + install: { + commands: 'npm install -g aws-cdk@1.2.3', + }, + }, + })), + }, + }); +}); + +class OneStackApp extends Stage { + constructor(scope: Construct, id: string, props?: StageProps) { + super(scope, id, props); + new BucketStack(this, 'Stack'); + } +} + +class OneStackAppWithCustomName extends Stage { + constructor(scope: Construct, id: string, props?: StageProps) { + super(scope, id, props); + new BucketStack(this, 'Stack', { + stackName: 'MyFancyStack', + }); + } +} diff --git a/packages/@aws-cdk/pipelines/test/stack-ordering.test.ts b/packages/@aws-cdk/pipelines/test/stack-ordering.test.ts new file mode 100644 index 0000000000000..e755572c78544 --- /dev/null +++ b/packages/@aws-cdk/pipelines/test/stack-ordering.test.ts @@ -0,0 +1,83 @@ +import { arrayWith, objectLike } from '@aws-cdk/assert'; +import '@aws-cdk/assert/jest'; +import { App, Construct, Stack, Stage, StageProps } from '@aws-cdk/core'; +import * as cdkp from '../lib'; +import { sortedByRunOrder } from './testmatchers'; +import { BucketStack, PIPELINE_ENV, TestApp, TestGitHubNpmPipeline } from './testutil'; + +let app: App; +let pipelineStack: Stack; +let pipeline: cdkp.CdkPipeline; + +beforeEach(() => { + app = new TestApp(); + pipelineStack = new Stack(app, 'PipelineStack', { env: PIPELINE_ENV }); + pipeline = new TestGitHubNpmPipeline(pipelineStack, 'Cdk'); +}); + +test('interdependent stacks are in the right order', () => { + // WHEN + pipeline.addApplicationStage(new TwoStackApp(app, 'MyApp')); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: arrayWith({ + Name: 'MyApp', + Actions: sortedByRunOrder([ + objectLike({ Name: 'Stack1.Prepare' }), + 
objectLike({ Name: 'Stack1.Deploy' }), + objectLike({ Name: 'Stack2.Prepare' }), + objectLike({ Name: 'Stack2.Deploy' }), + ]), + }), + }); +}); + +test('multiple independent stacks go in parallel', () => { + // WHEN + pipeline.addApplicationStage(new ThreeStackApp(app, 'MyApp')); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: arrayWith({ + Name: 'MyApp', + Actions: sortedByRunOrder([ + // 1 and 2 in parallel + objectLike({ Name: 'Stack1.Prepare' }), + objectLike({ Name: 'Stack2.Prepare' }), + objectLike({ Name: 'Stack1.Deploy' }), + objectLike({ Name: 'Stack2.Deploy' }), + // Then 3 + objectLike({ Name: 'Stack3.Prepare' }), + objectLike({ Name: 'Stack3.Deploy' }), + ]), + }), + }); +}); + +class TwoStackApp extends Stage { + constructor(scope: Construct, id: string, props?: StageProps) { + super(scope, id, props); + + const stack2 = new BucketStack(this, 'Stack2'); + const stack1 = new BucketStack(this, 'Stack1'); + + stack2.addDependency(stack1); + } +} + +/** + * Three stacks where the last one depends on the earlier 2 + */ +class ThreeStackApp extends Stage { + constructor(scope: Construct, id: string, props?: StageProps) { + super(scope, id, props); + + const stack1 = new BucketStack(this, 'Stack1'); + const stack2 = new BucketStack(this, 'Stack2'); + const stack3 = new BucketStack(this, 'Stack3'); + + stack3.addDependency(stack1); + stack3.addDependency(stack2); + } +} \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/test/test-docker-asset/Dockerfile b/packages/@aws-cdk/pipelines/test/test-docker-asset/Dockerfile new file mode 100644 index 0000000000000..d67ab4b1cc12c --- /dev/null +++ b/packages/@aws-cdk/pipelines/test/test-docker-asset/Dockerfile @@ -0,0 +1,2 @@ +FROM scratch +RUN touch built.txt \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/test/test-file-asset-two.txt b/packages/@aws-cdk/pipelines/test/test-file-asset-two.txt new file mode 100644 index 0000000000000..8b1c7231bf2f4 --- /dev/null +++ b/packages/@aws-cdk/pipelines/test/test-file-asset-two.txt @@ -0,0 +1 @@ +Here's a second file asset. \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/test/test-file-asset.txt b/packages/@aws-cdk/pipelines/test/test-file-asset.txt new file mode 100644 index 0000000000000..95e9dcd2e3bf0 --- /dev/null +++ b/packages/@aws-cdk/pipelines/test/test-file-asset.txt @@ -0,0 +1 @@ +This is a file asset that's just here for kicks. 
\ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/test/testmatchers.ts b/packages/@aws-cdk/pipelines/test/testmatchers.ts new file mode 100644 index 0000000000000..d2279fb4383b4 --- /dev/null +++ b/packages/@aws-cdk/pipelines/test/testmatchers.ts @@ -0,0 +1,121 @@ +import { exactValue, InspectionFailure, PropertyMatcher } from '@aws-cdk/assert'; + +/** + * Do a glob-like pattern match (which only supports *s) + */ +export function stringLike(pattern: string): PropertyMatcher { + // Replace * with .* in the string, escape the rest and brace with ^...$ + const regex = new RegExp(`^${pattern.split('*').map(escapeRegex).join('.*')}$`); + + return annotate({ $stringContaining: pattern }, (value: any, failure: InspectionFailure) => { + if (typeof value !== 'string') { + failure.failureReason = `Expected a string, but got '${typeof value}'`; + return false; + } + + if (!regex.test(value)) { + failure.failureReason = 'String did not match pattern'; + return false; + } + + return true; + }); +} + +/** + * Matches any value + */ +export function anything(): PropertyMatcher { + return annotate({ $anything: true }, () => true); +} + +/** + * Negate an inner matcher + */ +export function notMatching(matcher: any): PropertyMatcher { + return annotate({ $notMatching: matcher }, (value: any, failure: InspectionFailure) => { + const result = makeMatcher(matcher)(value, failure); + if (result) { + failure.failureReason = 'Should not have matched, but did'; + return false; + } + return true; + }); +} + +/** + * Sort an array (of Actions) by their RunOrder field before applying a matcher. + * + * Makes the matcher independent of the order in which the Actions get synthed + * to the template. Elements with the same RunOrder will be sorted by name. + */ +export function sortedByRunOrder(matcher: any): PropertyMatcher { + return annotate({ $sortedByRunOrder: matcher }, (value: any, failure: InspectionFailure) => { + if (!Array.isArray(value)) { + failure.failureReason = `Expected an Array, but got '${typeof value}'`; + return false; + } + + value = value.slice(); + + value.sort((a: any, b: any) => { + if (a.RunOrder !== b.RunOrder) { return a.RunOrder - b.RunOrder; } + return (a.Name as string).localeCompare(b.Name); + }); + + return makeMatcher(matcher)(value, failure); + }); +} + +/** + * Match on the innards of a JSON string, instead of the complete string + */ +export function encodedJson(matcher: any): PropertyMatcher { + return annotate({ $encodedJson: matcher }, (value: any, failure: InspectionFailure) => { + if (typeof value !== 'string') { + failure.failureReason = `Expected a string, but got '${typeof value}'`; + return false; + } + + let decoded; + try { + decoded = JSON.parse(value); + } catch (e) { + failure.failureReason = `String is not JSON: ${e}`; + return false; + } + + return makeMatcher(matcher)(decoded, failure); + }); +} + +function escapeRegex(s: string) { + return s.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string +} + +/** + * Whether a value is a callable + */ +function isCallable(x: any): x is ((...args: any[]) => any) { + return x && {}.toString.call(x) === '[object Function]'; +} + +/** + * Turn a matcher or literal into a matcher + * + * Unfortunately I forgot to make the match() function public, so I can only accept matcher functions, not literals. + * However I can transform a literal into a matcher by using `exactValue`. + */ +function makeMatcher(matcher: any): PropertyMatcher { + return isCallable(matcher) ? 
matcher : exactValue(matcher); +} + +/** + * This should also have been in the upstream library + * + * Annotate a matcher with toJSON + */ +function annotate(how: A, matcher: PropertyMatcher): PropertyMatcher { + (matcher as any).toJSON = () => how; + return matcher; +} \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/test/testutil.ts b/packages/@aws-cdk/pipelines/test/testutil.ts new file mode 100644 index 0000000000000..9c87e64c502b7 --- /dev/null +++ b/packages/@aws-cdk/pipelines/test/testutil.ts @@ -0,0 +1,106 @@ +import * as codepipeline from '@aws-cdk/aws-codepipeline'; +import * as codepipeline_actions from '@aws-cdk/aws-codepipeline-actions'; +import * as s3 from '@aws-cdk/aws-s3'; +import { App, AppProps, Construct, Environment, SecretValue, Stack, StackProps, Stage } from '@aws-cdk/core'; +import * as fs from 'fs'; +import * as path from 'path'; +import * as cdkp from '../lib'; +import { assemblyBuilderOf } from '../lib/private/construct-internals'; + +export const PIPELINE_ENV: Environment = { + account: '123pipeline', + region: 'us-pipeline', +}; + +export class TestApp extends App { + constructor(props?: Partial) { + super({ + context: { + '@aws-cdk/core:newStyleStackSynthesis': '1', + }, + stackTraces: false, + autoSynth: false, + runtimeInfo: false, + treeMetadata: false, + ...props, + }); + } + + public cleanup() { + rimraf(assemblyBuilderOf(this).outdir); + } +} + +export class TestGitHubNpmPipeline extends cdkp.CdkPipeline { + public readonly sourceArtifact: codepipeline.Artifact; + public readonly cloudAssemblyArtifact: codepipeline.Artifact; + + constructor(scope: Construct, id: string, props?: Partial & { readonly sourceArtifact?: codepipeline.Artifact } ) { + const sourceArtifact = props?.sourceArtifact ?? new codepipeline.Artifact(); + const cloudAssemblyArtifact = props?.cloudAssemblyArtifact ?? new codepipeline.Artifact(); + + super(scope, id, { + sourceAction: new codepipeline_actions.GitHubSourceAction({ + actionName: 'GitHub', + output: sourceArtifact, + oauthToken: SecretValue.plainText('$3kr1t'), + owner: 'test', + repo: 'test', + trigger: codepipeline_actions.GitHubTrigger.POLL, + }), + synthAction: cdkp.SimpleSynthAction.standardNpmSynth({ + sourceArtifact, + cloudAssemblyArtifact, + }), + cloudAssemblyArtifact, + ...props, + }); + + this.sourceArtifact = sourceArtifact; + this.cloudAssemblyArtifact = cloudAssemblyArtifact; + } +} + +/** + * A test stack + * + * It contains a single Bucket. Such robust. Much uptime. 
+ */ +export class BucketStack extends Stack { + public readonly bucket: s3.IBucket; + + constructor(scope: Construct, id: string, props?: StackProps) { + super(scope, id, props); + this.bucket = new s3.Bucket(this, 'Bucket'); + } +} + +/** + * rm -rf reimplementation, don't want to depend on an NPM package for this + */ +export function rimraf(fsPath: string) { + try { + const isDir = fs.lstatSync(fsPath).isDirectory(); + + if (isDir) { + for (const file of fs.readdirSync(fsPath)) { + rimraf(path.join(fsPath, file)); + } + fs.rmdirSync(fsPath); + } else { + fs.unlinkSync(fsPath); + } + } catch (e) { + // We will survive ENOENT + if (e.code !== 'ENOENT') { throw e; } + } +} + +/** + * Because 'expect(stack)' doesn't work correctly for stacks in nested assemblies + */ +export function stackTemplate(stack: Stack) { + const stage = Stage.of(stack); + if (!stage) { throw new Error('stack not in a Stage'); } + return stage.synth().getStackArtifact(stack.artifactId); +} \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/test/validation.test.ts b/packages/@aws-cdk/pipelines/test/validation.test.ts new file mode 100644 index 0000000000000..e2c16e8e67bfc --- /dev/null +++ b/packages/@aws-cdk/pipelines/test/validation.test.ts @@ -0,0 +1,178 @@ +import { _objectContaining, arrayWith, deepObjectLike } from '@aws-cdk/assert'; +import '@aws-cdk/assert/jest'; +import * as codepipeline from '@aws-cdk/aws-codepipeline'; +import { CfnOutput, Construct, Stack, Stage, StageProps } from '@aws-cdk/core'; +import * as cdkp from '../lib'; +import { anything, encodedJson } from './testmatchers'; +import { BucketStack, PIPELINE_ENV, TestApp, TestGitHubNpmPipeline } from './testutil'; + +let app: TestApp; +let pipelineStack: Stack; +let pipeline: cdkp.CdkPipeline; +let sourceArtifact: codepipeline.Artifact; +let cloudAssemblyArtifact: codepipeline.Artifact; +let integTestArtifact: codepipeline.Artifact; + +beforeEach(() => { + app = new TestApp(); + pipelineStack = new Stack(app, 'PipelineStack', { env: PIPELINE_ENV }); + sourceArtifact = new codepipeline.Artifact(); + cloudAssemblyArtifact = new codepipeline.Artifact('CloudAsm'); + integTestArtifact = new codepipeline.Artifact('IntegTests'); + pipeline = new TestGitHubNpmPipeline(pipelineStack, 'Cdk', { + sourceArtifact, + cloudAssemblyArtifact, + synthAction: cdkp.SimpleSynthAction.standardNpmSynth({ + sourceArtifact, + cloudAssemblyArtifact, + additionalArtifacts: [{ directory: 'test', artifact: integTestArtifact }], + }), + }); +}); + +afterEach(() => { + app.cleanup(); +}); + +test('can use stack outputs as validation inputs', () => { + // GIVEN + const stage = new AppWithStackOutput(app, 'MyApp'); + + // WHEN + const pipeStage = pipeline.addApplicationStage(stage); + pipeStage.addActions(new cdkp.ShellScriptAction({ + actionName: 'TestOutput', + useOutputs: { + BUCKET_NAME: pipeline.stackOutput(stage.output), + }, + commands: ['echo $BUCKET_NAME'], + })); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: arrayWith({ + Name: 'MyApp', + Actions: arrayWith( + deepObjectLike({ + Name: 'Stack.Deploy', + OutputArtifacts: [{ Name: anything() }], + Configuration: { + OutputFileName: 'outputs.json', + }, + }), + deepObjectLike({ + ActionTypeId: { + Provider: 'CodeBuild', + }, + Configuration: { + ProjectName: anything(), + }, + InputArtifacts: [{ Name: anything() }], + Name: 'TestOutput', + }), + ), + }), + }); + + expect(pipelineStack).toHaveResourceLike('AWS::CodeBuild::Project', { + Source: { + 
BuildSpec: encodedJson(deepObjectLike({ + phases: { + build: { + commands: [ + 'set -eu', + 'export BUCKET_NAME="$(node -pe \'require(process.env.CODEBUILD_SRC_DIR + "/outputs.json")["BucketName"]\')"', + 'echo $BUCKET_NAME', + ], + }, + }, + })), + Type: 'CODEPIPELINE', + }, + }); +}); + +test('can use additional files from source', () => { + // WHEN + pipeline.addStage('Test').addActions(new cdkp.ShellScriptAction({ + actionName: 'UseSources', + additionalArtifacts: [sourceArtifact], + commands: ['true'], + })); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: arrayWith({ + Name: 'Test', + Actions: [ + deepObjectLike({ + Name: 'UseSources', + InputArtifacts: [ { Name: 'Artifact_Source_GitHub' } ], + }), + ], + }), + }); + expect(pipelineStack).toHaveResourceLike('AWS::CodeBuild::Project', { + Source: { + BuildSpec: encodedJson(deepObjectLike({ + phases: { + build: { + commands: [ + 'set -eu', + 'true', + ], + }, + }, + })), + }, + }); +}); + +test('can use additional files from build', () => { + // WHEN + pipeline.addStage('Test').addActions(new cdkp.ShellScriptAction({ + actionName: 'UseBuildArtifact', + additionalArtifacts: [integTestArtifact], + commands: ['true'], + })); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: arrayWith({ + Name: 'Test', + Actions: [ + deepObjectLike({ + Name: 'UseBuildArtifact', + InputArtifacts: [ { Name: 'IntegTests' } ], + }), + ], + }), + }); + expect(pipelineStack).toHaveResourceLike('AWS::CodeBuild::Project', { + Source: { + BuildSpec: encodedJson(deepObjectLike({ + phases: { + build: { + commands: [ + 'set -eu', + 'true', + ], + }, + }, + })), + }, + }); +}); + +class AppWithStackOutput extends Stage { + public readonly output: CfnOutput; + + constructor(scope: Construct, id: string, props?: StageProps) { + super(scope, id, props); + const stack = new BucketStack(this, 'Stack'); + + this.output = new CfnOutput(stack, 'BucketName', { + value: stack.bucket.bucketName, + }); + } +} \ No newline at end of file diff --git a/yarn.lock b/yarn.lock index 19e878bfb3ea9..cfdf1d5e349b5 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1901,6 +1901,11 @@ anymatch@^3.0.3: normalize-path "^3.0.0" picomatch "^2.0.4" +app-root-path@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/app-root-path/-/app-root-path-2.2.1.tgz#d0df4a682ee408273583d43f6f79e9892624bc9a" + integrity sha512-91IFKeKk7FjfmezPKkwtaRvSpnUc4gDwPAjA1YZ9Gn0q0PPeW+vbeUsZuyDwjI7+QTHhcLen2v25fi/AmhvbJA== + append-transform@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/append-transform/-/append-transform-1.0.0.tgz#046a52ae582a228bd72f58acfbe2967c678759ab" @@ -2129,7 +2134,7 @@ available-typed-arrays@^1.0.0, available-typed-arrays@^1.0.2: dependencies: array-filter "^1.0.0" -aws-sdk-mock@^5.1.0: +aws-sdk-mock@^5.0.0, aws-sdk-mock@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/aws-sdk-mock/-/aws-sdk-mock-5.1.0.tgz#6f2c0bd670d7f378c906a8dd806f812124db71aa" integrity sha512-Wa5eCSo8HX0Snqb7FdBylaXMmfrAWoWZ+d7MFhiYsgHPvNvMEGjV945FF2qqE1U0Tolr1ALzik1fcwgaOhqUWQ== @@ -2138,7 +2143,7 @@ aws-sdk-mock@^5.1.0: sinon "^9.0.1" traverse "^0.6.6" -aws-sdk@^2.637.0, aws-sdk@^2.707.0: +aws-sdk@^2.596.0, aws-sdk@^2.637.0, aws-sdk@^2.707.0: version "2.707.0" resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.707.0.tgz#c7506dadd1d60a73920ade9027645e166f444e25" integrity sha512-nt55Z9wQKFodOuwElF3222Thc3kDVnaC4rwemPEHIM1cVGPQe6E5yBfc6AwtYmSo6eoMMEWd6XO5wG2am9PW0w== @@ -3660,6 
+3665,16 @@ dot-prop@^4.2.0: dependencies: is-obj "^1.0.0" +dotenv-json@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/dotenv-json/-/dotenv-json-1.0.0.tgz#fc7f672aafea04bed33818733b9f94662332815c" + integrity sha512-jAssr+6r4nKhKRudQ0HOzMskOFFi9+ubXWwmrSGJFgTvpjyPXCXsCsYbjif6mXp7uxA7xY3/LGaiTQukZzSbOQ== + +dotenv@^8.0.0: + version "8.2.0" + resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-8.2.0.tgz#97e619259ada750eea3e4ea3e26bceea5424b16a" + integrity sha512-8sJ78ElpbDJBHNeBzUbUVLsqKdccaa/BXF1uPTw3GrvQTBgrQrtObr2mUrE38vzYd8cEv+m/JBfDLioYcfXoaw== + dotgitignore@2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/dotgitignore/-/dotgitignore-2.1.0.tgz#a4b15a4e4ef3cf383598aaf1dfa4a04bcc089b7b" @@ -3842,6 +3857,11 @@ escodegen@1.x.x, escodegen@^1.11.1: optionalDependencies: source-map "~0.6.1" +eslint-config-standard@^14.1.0: + version "14.1.1" + resolved "https://registry.yarnpkg.com/eslint-config-standard/-/eslint-config-standard-14.1.1.tgz#830a8e44e7aef7de67464979ad06b406026c56ea" + integrity sha512-Z9B+VR+JIXRxz21udPTL9HpFMyoMUEeX1G251EQ6e05WD9aPVtVBn09XUmZ259wCMlCDmYDSZG62Hhm+ZTJcUg== + eslint-import-resolver-node@^0.3.3, eslint-import-resolver-node@^0.3.4: version "0.3.4" resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.4.tgz#85ffa81942c25012d8231096ddf679c03042c717" @@ -3869,7 +3889,15 @@ eslint-module-utils@^2.6.0: debug "^2.6.9" pkg-dir "^2.0.0" -eslint-plugin-import@^2.22.0: +eslint-plugin-es@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-es/-/eslint-plugin-es-2.0.0.tgz#0f5f5da5f18aa21989feebe8a73eadefb3432976" + integrity sha512-f6fceVtg27BR02EYnBhgWLFQfK6bN4Ll0nQFrBHOlCsAyxeZkn0NHns5O0YZOPrV1B3ramd6cgFwaoFLcSkwEQ== + dependencies: + eslint-utils "^1.4.2" + regexpp "^3.0.0" + +eslint-plugin-import@^2.19.1, eslint-plugin-import@^2.22.0: version "2.22.0" resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.22.0.tgz#92f7736fe1fde3e2de77623c838dd992ff5ffb7e" integrity sha512-66Fpf1Ln6aIS5Gr/55ts19eUuoDhAbZgnr6UxK5hbDx6l/QgQgx61AePq+BV4PP2uXQFClgMVzep5zZ94qqsxg== @@ -3888,6 +3916,28 @@ eslint-plugin-import@^2.22.0: resolve "^1.17.0" tsconfig-paths "^3.9.0" +eslint-plugin-node@^10.0.0: + version "10.0.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-node/-/eslint-plugin-node-10.0.0.tgz#fd1adbc7a300cf7eb6ac55cf4b0b6fc6e577f5a6" + integrity sha512-1CSyM/QCjs6PXaT18+zuAXsjXGIGo5Rw630rSKwokSs2jrYURQc4R5JZpoanNCqwNmepg+0eZ9L7YiRUJb8jiQ== + dependencies: + eslint-plugin-es "^2.0.0" + eslint-utils "^1.4.2" + ignore "^5.1.1" + minimatch "^3.0.4" + resolve "^1.10.1" + semver "^6.1.0" + +eslint-plugin-promise@^4.2.1: + version "4.2.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-promise/-/eslint-plugin-promise-4.2.1.tgz#845fd8b2260ad8f82564c1222fce44ad71d9418a" + integrity sha512-VoM09vT7bfA7D+upt+FjeBO5eHIJQBUWki1aPvB+vbNiHS3+oGIJGIeyBtKQTME6UPXXy3vV07OL1tHd3ANuDw== + +eslint-plugin-standard@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-standard/-/eslint-plugin-standard-4.0.1.tgz#ff0519f7ffaff114f76d1bd7c3996eef0f6e20b4" + integrity sha512-v/KBnfyaOMPmZc/dmc6ozOdWqekGp7bBGq4jLAecEfPGmfKiWS4sA8sC0LqiV9w5qmXAtXVn4M3p1jSyhY85SQ== + eslint-scope@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.0.0.tgz#e87c8887c73e8d1ec84f1ca591645c358bfc8fb9" @@ -3896,7 +3946,7 @@ eslint-scope@^5.0.0: esrecurse "^4.1.0" estraverse "^4.1.1" -eslint-utils@^1.4.3: 
+eslint-utils@^1.4.2, eslint-utils@^1.4.3: version "1.4.3" resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-1.4.3.tgz#74fec7c54d0776b6f67e0251040b5806564e981f" integrity sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q== @@ -4954,6 +5004,11 @@ ignore@^4.0.3, ignore@^4.0.6: resolved "https://registry.yarnpkg.com/ignore/-/ignore-4.0.6.tgz#750e3db5862087b4737ebac8207ffd1ef27b25fc" integrity sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg== +ignore@^5.1.1: + version "5.1.8" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.1.8.tgz#f150a8b50a34289b33e22f5889abd4d8016f0e57" + integrity sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw== + immediate@~3.0.5: version "3.0.6" resolved "https://registry.yarnpkg.com/immediate/-/immediate-3.0.6.tgz#9db1dbd0faf8de6fbe0f5dd5e56bb606280de69b" @@ -5916,7 +5971,7 @@ jest-worker@^25.5.0: merge-stream "^2.0.0" supports-color "^7.0.0" -jest@^25.4.0, jest@^25.5.2, jest@^25.5.3, jest@^25.5.4: +jest@^25.4.0, jest@^25.5.0, jest@^25.5.2, jest@^25.5.3, jest@^25.5.4: version "25.5.4" resolved "https://registry.yarnpkg.com/jest/-/jest-25.5.4.tgz#f21107b6489cfe32b076ce2adcadee3587acb9db" integrity sha512-hHFJROBTqZahnO+X+PMtT6G2/ztqAZJveGqz//FnWWHurizkD05PQGzRZOhF3XP6z7SJmL+5tCfW8qV06JypwQ== @@ -6201,6 +6256,24 @@ kleur@^3.0.3: resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== +lambda-leak@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/lambda-leak/-/lambda-leak-2.0.0.tgz#771985d3628487f6e885afae2b54510dcfb2cd7e" + integrity sha1-dxmF02KEh/boha+uK1RRDc+yzX4= + +lambda-tester@^3.6.0: + version "3.6.0" + resolved "https://registry.yarnpkg.com/lambda-tester/-/lambda-tester-3.6.0.tgz#ceb7d4f4f0da768487a05cff37dcd088508b5247" + integrity sha512-F2ZTGWCLyIR95o/jWK46V/WnOCFAEUG/m/V7/CLhPJ7PCM+pror1rZ6ujP3TkItSGxUfpJi0kqwidw+M/nEqWw== + dependencies: + app-root-path "^2.2.1" + dotenv "^8.0.0" + dotenv-json "^1.0.0" + lambda-leak "^2.0.0" + semver "^6.1.1" + uuid "^3.3.2" + vandium-utils "^1.1.1" + lazystream@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/lazystream/-/lazystream-1.0.0.tgz#f6995fe0f820392f61396be89462407bb77168e4" @@ -6806,7 +6879,7 @@ mkdirp@*, mkdirp@1.x: resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== -mkdirp@^0.5.0, mkdirp@^0.5.1: +mkdirp@0.x, mkdirp@^0.5.0, mkdirp@^0.5.1: version "0.5.5" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def" integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ== @@ -6937,6 +7010,17 @@ nise@^4.0.1: just-extend "^4.0.2" path-to-regexp "^1.7.0" +nock@^11.7.0: + version "11.9.1" + resolved "https://registry.yarnpkg.com/nock/-/nock-11.9.1.tgz#2b026c5beb6d0dbcb41e7e4cefa671bc36db9c61" + integrity sha512-U5wPctaY4/ar2JJ5Jg4wJxlbBfayxgKbiAeGh+a1kk6Pwnc2ZEuKviLyDSG6t0uXl56q7AALIxoM6FJrBSsVXA== + dependencies: + debug "^4.1.0" + json-stringify-safe "^5.0.1" + lodash "^4.17.13" + mkdirp "^0.5.0" + propagate "^2.0.0" + nock@^13.0.0: version "13.0.0" resolved 
"https://registry.yarnpkg.com/nock/-/nock-13.0.0.tgz#e07276d558245386a2872cebf4d5570583c0d225" @@ -8307,7 +8391,7 @@ resolve@1.1.7: resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b" integrity sha1-IDEU2CrSxe2ejgQRs5ModeiJ6Xs= -resolve@^1.1.6, resolve@^1.10.0, resolve@^1.11.1, resolve@^1.12.0, resolve@^1.13.1, resolve@^1.17.0, resolve@^1.3.2: +resolve@^1.1.6, resolve@^1.10.0, resolve@^1.10.1, resolve@^1.11.1, resolve@^1.12.0, resolve@^1.13.1, resolve@^1.17.0, resolve@^1.3.2: version "1.17.0" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.17.0.tgz#b25941b54968231cc2d1bb76a79cb7f2c0bf8444" integrity sha512-ic+7JYiV8Vi2yzQGFWOkiZD5Z9z7O2Zhm9XMaTxdJExKasieFCr+yXZ/WmXsckHiKl12ar0y6XiXDx3m4RHn1w== @@ -8458,6 +8542,11 @@ semver-intersect@^1.4.0: resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== +semver@6.x, semver@^6.0.0, semver@^6.1.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" + integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== + semver@7.1.1: version "7.1.1" resolved "https://registry.yarnpkg.com/semver/-/semver-7.1.1.tgz#29104598a197d6cbe4733eeecbe968f7b43a9667" @@ -8468,11 +8557,6 @@ semver@7.x, semver@^7.2.2, semver@^7.3.2: resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.2.tgz#604962b052b81ed0786aae84389ffba70ffd3938" integrity sha512-OrOb32TeeambH6UrhtShmF7CRDqhL6/5XpPNp2DuRH6+9QLw/orhp72j87v8Qa1ScDkvrrBNpZcDejAirJmfXQ== -semver@^6.0.0, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0: - version "6.3.0" - resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" - integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== - set-blocking@^2.0.0, set-blocking@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" @@ -9446,6 +9530,22 @@ trivial-deferred@^1.0.1: resolved "https://registry.yarnpkg.com/trivial-deferred/-/trivial-deferred-1.0.1.tgz#376d4d29d951d6368a6f7a0ae85c2f4d5e0658f3" integrity sha1-N21NKdlR1jaKb3oK6FwvTV4GWPM= +ts-jest@^25.3.1: + version "25.5.1" + resolved "https://registry.yarnpkg.com/ts-jest/-/ts-jest-25.5.1.tgz#2913afd08f28385d54f2f4e828be4d261f4337c7" + integrity sha512-kHEUlZMK8fn8vkxDjwbHlxXRB9dHYpyzqKIGDNxbzs+Rz+ssNDSDNusEK8Fk/sDd4xE6iKoQLfFkFVaskmTJyw== + dependencies: + bs-logger "0.x" + buffer-from "1.x" + fast-json-stable-stringify "2.x" + json5 "2.x" + lodash.memoize "4.x" + make-error "1.x" + micromatch "4.x" + mkdirp "0.x" + semver "6.x" + yargs-parser "18.x" + ts-jest@^26.1.1: version "26.1.1" resolved "https://registry.yarnpkg.com/ts-jest/-/ts-jest-26.1.1.tgz#b98569b8a4d4025d966b3d40c81986dd1c510f8d" @@ -9795,6 +9895,11 @@ validate-npm-package-name@^3.0.0: dependencies: builtins "^1.0.3" +vandium-utils@^1.1.1: + version "1.2.0" + resolved "https://registry.yarnpkg.com/vandium-utils/-/vandium-utils-1.2.0.tgz#44735de4b7641a05de59ebe945f174e582db4f59" + integrity sha1-RHNd5LdkGgXeWevpRfF05YLbT1k= + verror@1.10.0: version "1.10.0" resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" From 
cfa01f4ddebeb6a0c8d2694476399e6a6a0ce676 Mon Sep 17 00:00:00 2001 From: Rico Huijbers Date: Thu, 2 Jul 2020 16:46:24 +0200 Subject: [PATCH 2/9] pkglint fix --- packages/@aws-cdk/pipelines/.npmignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/packages/@aws-cdk/pipelines/.npmignore b/packages/@aws-cdk/pipelines/.npmignore index 174864d493a79..fe4df9a06d9a9 100644 --- a/packages/@aws-cdk/pipelines/.npmignore +++ b/packages/@aws-cdk/pipelines/.npmignore @@ -19,3 +19,6 @@ dist tsconfig.json .eslintrc.js + +# exclude cdk artifacts +**/cdk.out \ No newline at end of file From abe09ef2db7799001245bd1b74afa7491808faf6 Mon Sep 17 00:00:00 2001 From: Rico Huijbers Date: Mon, 6 Jul 2020 11:53:13 +0200 Subject: [PATCH 3/9] Update readme --- .../aws-codepipeline-actions/README.md | 53 ++++++----- packages/@aws-cdk/pipelines/README.md | 92 ++++++++++++------- 2 files changed, 89 insertions(+), 56 deletions(-) diff --git a/packages/@aws-cdk/aws-codepipeline-actions/README.md b/packages/@aws-cdk/aws-codepipeline-actions/README.md index 327a7fd289809..0c4a3908761eb 100644 --- a/packages/@aws-cdk/aws-codepipeline-actions/README.md +++ b/packages/@aws-cdk/aws-codepipeline-actions/README.md @@ -14,9 +14,9 @@ import * as codepipeline from '@aws-cdk/aws-codepipeline'; import * as codepipeline_actions from '@aws-cdk/aws-codepipeline-actions'; ``` -### Sources +## Sources -#### AWS CodeCommit +### AWS CodeCommit To use a CodeCommit Repository in a CodePipeline: @@ -62,7 +62,14 @@ new codepipeline_actions.CodeBuildAction({ }); ``` -#### GitHub +### GitHub + +If you want to use a GitHub repository as the source, you must create: + +* A [GitHub Access Token](https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line) +* A [Secrets Manager PlainText Secret](https://docs.aws.amazon.com/secretsmanager/latest/userguide/manage_create-basic-secret.html) + with the value of the **GitHub Access Token**. Pick whatever name you want + (for example `my-github-token`) and pass it as the argument of `oauthToken`. To use GitHub as the source of a CodePipeline: @@ -104,7 +111,7 @@ new codepipeline_actions.CodeBuildAction({ }); ``` -#### BitBucket +### BitBucket CodePipeline can use a BitBucket Git repository as a source: @@ -135,7 +142,7 @@ const sourceAction = new codepipeline_actions.BitBucketSourceAction({ the above class `BitBucketSourceAction` is experimental - we reserve the right to make breaking changes to it. -#### AWS S3 +### AWS S3 To use an S3 Bucket as a source in CodePipeline: @@ -205,7 +212,7 @@ new codepipeline_actions.CodeBuildAction({ }); ``` -#### AWS ECR +### AWS ECR To use an ECR Repository as a source in a Pipeline: @@ -246,9 +253,9 @@ new codepipeline_actions.CodeBuildAction({ }); ``` -### Build & test +## Build & test -#### AWS CodeBuild +### AWS CodeBuild Example of a CodeBuild Project used in a Pipeline, alongside CodeCommit: @@ -301,7 +308,7 @@ const testAction = new codepipeline_actions.CodeBuildAction({ }); ``` -##### Multiple inputs and outputs +#### Multiple inputs and outputs When you want to have multiple inputs and/or outputs for a Project used in a Pipeline, instead of using the `secondarySources` and `secondaryArtifacts` @@ -375,7 +382,7 @@ const project = new codebuild.PipelineProject(this, 'MyProject', { }); ``` -##### Variables +#### Variables The CodeBuild action emits variables. 
Unlike many other actions, the variables are not static, @@ -399,7 +406,7 @@ const buildAction = new codepipeline_actions.CodeBuildAction({ build: { commands: 'export MY_VAR="some value"', }, - }, + }, }), }), variablesNamespace: 'MyNamespace', // optional - by default, a name will be generated for you @@ -417,7 +424,7 @@ new codepipeline_actions.CodeBuildAction({ }); ``` -#### Jenkins +### Jenkins In order to use Jenkins Actions in the Pipeline, you first need to create a `JenkinsProvider`: @@ -459,9 +466,9 @@ const buildAction = new codepipeline_actions.JenkinsAction({ }); ``` -### Deploy +## Deploy -#### AWS CloudFormation +### AWS CloudFormation This module contains Actions that allows you to deploy to CloudFormation from AWS CodePipeline. @@ -497,7 +504,7 @@ using a CloudFormation CodePipeline Action. Example: [Example of deploying a Lambda through CodePipeline](test/integ.lambda-deployed-through-codepipeline.lit.ts) -##### Cross-account actions +#### Cross-account actions If you want to update stacks in a different account, pass the `account` property when creating the action: @@ -534,9 +541,9 @@ new codepipeline_actions.CloudFormationCreateUpdateStackAction({ }); ``` -#### AWS CodeDeploy +### AWS CodeDeploy -##### Server deployments +#### Server deployments To use CodeDeploy for EC2/on-premise deployments in a Pipeline: @@ -589,7 +596,7 @@ where you will define your Pipeline, and deploy the `lambdaStack` using a CloudFormation CodePipeline Action (see above for a complete example). -#### ECS +### ECS CodePipeline can deploy an ECS service. The deploy Action receives one input Artifact which contains the [image definition file]: @@ -616,7 +623,7 @@ const deployStage = pipeline.addStage({ [image definition file]: https://docs.aws.amazon.com/codepipeline/latest/userguide/pipelines-create.html#pipelines-create-image-definitions -#### AWS S3 +### AWS S3 To use an S3 Bucket as a deployment target in CodePipeline: @@ -636,7 +643,7 @@ const deployStage = pipeline.addStage({ }); ``` -#### Alexa Skill +### Alexa Skill You can deploy to Alexa using CodePipeline with the following Action: @@ -687,9 +694,9 @@ new codepipeline_actions.AlexaSkillDeployAction({ }); ``` -### Approve & invoke +## Approve & invoke -#### Manual approval Action +### Manual approval Action This package contains an Action that stops the Pipeline until someone manually clicks the approve button: @@ -712,7 +719,7 @@ but `notifyEmails` were, a new SNS Topic will be created (and accessible through the `notificationTopic` property of the Action). -#### AWS Lambda +### AWS Lambda This module contains an Action that allows you to invoke a Lambda function in a Pipeline: diff --git a/packages/@aws-cdk/pipelines/README.md b/packages/@aws-cdk/pipelines/README.md index 8bb40c1b863c5..048957653aa82 100644 --- a/packages/@aws-cdk/pipelines/README.md +++ b/packages/@aws-cdk/pipelines/README.md @@ -21,7 +21,10 @@ A construct library for painless Continuous Delivery of CDK applications. Defining a pipeline for your application is as simple as defining a subclass of `Stage`, and calling `pipeline.addApplicationStage()` with instances of that class. Deploying to a different account or region looks exactly the -same, the *CDK Pipelines* library takes care the differences. +same, the *CDK Pipelines* library takes care of the details. + +(Note that have to *bootstrap* all environments before the following code +will work, see the section **CDK Environment Bootstrapping** below). 
 ```ts
 import { Construct, Stage } from '@aws-cdk/core';
@@ -75,10 +78,7 @@ stacks.
 
 ## CDK Versioning
 
-This library requires exactly CDK version `1.45.0`. The rest of your application must
-use the same version.
-
-It uses prerelease features of the CDK framework, which can be enabled by adding the
+This library uses prerelease features of the CDK framework, which can be enabled by adding the
 following to `cdk.json`:
 
 ```
@@ -91,7 +91,7 @@ following to `cdk.json`:
 ```
 
 When bootstrapping, the environment variable `CDK_NEW_BOOTSTRAP=1` should be
-set (see the section called **CDK Bootstrapping**).
+set (see the section called **CDK Environment Bootstrapping**).
 
 ## Defining the Pipeline (Source and Synth)
 
@@ -126,7 +126,6 @@ class MyPipelineStack extends Stack {
       synthAction: SimpleSynthAction.standardNpmSynth({
         sourceArtifact,
         cloudAssemblyArtifact,
-        projectName: 'MyAppPipeline-synth',
 
         // Use this if you need a build step (if you're not using ts-node
         // or if you have TypeScript Lambdas that need to be compiled).
@@ -145,8 +144,10 @@ new MyPipelineStack(this, 'PipelineStack', {
 });
 ```
 
+## Initial pipeline deployment
+
 You provision this pipeline by making sure the target environment has been
-bootstrapped (see below), and then executing `cdk deploy PipelineStack`
+bootstrapped (see below), and then deploying the `PipelineStack`
 *once*. Afterwards, the pipeline will keep itself up-to-date.
 
 > **Important**: be sure to `git commit` and `git push` before deploying the
@@ -156,37 +157,56 @@ bootstrapped (see below), and then executing `cdk deploy PipelineStack`
 > right away based on the sources in the repository, so the sources it finds
 > in there should be the ones you want it to find.
 
-### Sources
+Run the following commands to get the pipeline going:
 
-Any of the regular sources from the `@aws-cdk/aws-codepipeline-actions` module can be used.
+```
+$ git commit -a
+$ git push
+$ cdk deploy PipelineStack
+```
 
-#### GitHub
+Administrative permissions to the account are only necessary up until
+this point. We recommend you shed access to these credentials after doing this.
 
-If you want to use a GitHub repository as the source, you must also create:
+### Sources
 
-* A [GitHub Access Token](https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line)
-* A [Secrets Manager PlainText Secret](https://docs.aws.amazon.com/secretsmanager/latest/userguide/manage_create-basic-secret.html)
-  with the value of the **GitHub Access Token**. Pick whatever name you want
-  (for example `github-token`) and pass it as the argument of `oauthToken`.
+Any of the regular sources from the [`@aws-cdk/aws-codepipeline-actions`](https://docs.aws.amazon.com/cdk/api/latest/docs/aws-codepipeline-actions-readme.html#github) module can be used.
 
 ### Synths
 
-You define how to build and synth the project by specifying a `synthAction`. This can be
-any CodePipeline action that produces an artifact with a CDK Cloud Assembly in it. Pass
-the output artifact of the synth in the Pipeline's `cloudAssemblyArtifact` property.
+You define how to build and synth the project by specifying a `synthAction`.
+This can be any CodePipeline action that produces an artifact with a CDK
+Cloud Assembly in it (the contents of the `cdk.out` directory created when
+`cdk synth` is called). Pass the output artifact of the synth in the
+Pipeline's `cloudAssemblyArtifact` property.
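+
+If you need full control, any custom CodeBuild-based action works, as noted further
+below. Purely as an illustrative sketch (this is not an official example from this
+module, and the project name, commands, and buildspec are assumptions), a custom
+synth action could emit the contents of `cdk.out` as the cloud assembly artifact:
+
+```ts
+import * as codebuild from '@aws-cdk/aws-codebuild';
+import * as codepipeline_actions from '@aws-cdk/aws-codepipeline-actions';
+
+// Hypothetical hand-rolled synth action; `this` is assumed to be the pipeline stack,
+// and `sourceArtifact`/`cloudAssemblyArtifact` are the artifacts defined above.
+const customSynthAction = new codepipeline_actions.CodeBuildAction({
+  actionName: 'Synth',
+  input: sourceArtifact,
+  outputs: [cloudAssemblyArtifact],
+  project: new codebuild.PipelineProject(this, 'SynthProject', {
+    buildSpec: codebuild.BuildSpec.fromObject({
+      version: '0.2',
+      phases: {
+        install: { commands: ['npm install -g aws-cdk'] },
+        build: { commands: ['npx cdk synth'] },
+      },
+      // The synthesized cloud assembly lands in cdk.out; emit it as the output artifact.
+      artifacts: { 'base-directory': 'cdk.out', files: ['**/*'] },
+    }),
+  }),
+});
+```
+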
 `SimpleSynthAction` is available for synths that can be performed by running a couple
-of simple shell commands (install, build, and synth). Available as factory functions
-on `SimpleSynthAction` are some common convention-based synth:
+of simple shell commands (install, build, and synth) using AWS CodeBuild. When
+using these, the source repository does not need to have a `buildspec.yml`. An example
+of using `SimpleSynthAction` to run a Maven build followed by a CDK synth:
+
+```ts
+const pipeline = new CdkPipeline(this, 'Pipeline', {
+  // ...
+  synthAction: new SimpleSynthAction({
+    sourceArtifact,
+    cloudAssemblyArtifact,
+    installCommand: 'npm install -g aws-cdk',
+    buildCommand: 'mvn package',
+    synthCommand: 'cdk synth',
+  })
+});
+```
+
+Available as factory functions on `SimpleSynthAction` are some common
+convention-based synths:
 
 * `SimpleSynthAction.standardNpmSynth()`: build using NPM conventions. Expects a
   `package-lock.json`, a `cdk.json`, and expects the CLI to be a versioned dependency
   in `package.json`. Does
-  not perform a build step by default. The source repository does not need to
-  have a `buildspec.yml`.
+  not perform a build step by default.
 * `SimpleSynthAction.standardYarnSynth()`: build using Yarn conventions. Expects a `yarn.lock`,
   a `cdk.json`, and expects the CLI to be a versioned dependency in `package.json`. Does
-  not perform a build step by default. The source repository does not need to
-  have a `buildspec.yml`.
+  not perform a build step by default.
 
 If you need a custom build/synth step that is not covered by `SimpleSynthAction`, you can
 always add a custom CodeBuild project and pass a corresponding `CodeBuildAction` to the
@@ -229,17 +249,17 @@ pipeline.addApplicationStage(new MyApplication(this, 'Production', {
 
 Every *Application Stage* added by `addApplicationStage()` will lead to the addition of
 an individual *Pipeline Stage*, which is subsequently returned. You can add more
-actions to the stage by calling `addCustomAction()` on it. For example:
+actions to the stage by calling `addAction()` on it. For example:
 
 ```ts
 const testingStage = pipeline.addApplicationStage(new MyApplication(this, 'Testing', {
   env: { account: '111111111111', region: 'eu-west-1' }
 }));
 
-// Add a custom action -- in this case, a Manual Approval action
+// Add an action -- in this case, a Manual Approval action
 // (for illustration purposes: testingStage.addManualApprovalAction() is a
 // convenience shorthand that does the same)
-testingStage.addCustomAction(new ManualApprovalAction({
+testingStage.addAction(new ManualApprovalAction({
   actionName: 'ManualApproval',
   runOrder: testingStage.nextSequentialRunOrder(),
 }));
@@ -393,11 +413,17 @@ const validationAction = new ShellScriptAction({
 });
 ```
 
-## CDK Bootstrapping
+## CDK Environment Bootstrapping
 
-An *environment* is an *(account, region)* pair where you want to deploy a CDK
-stack (see [Environments](https://docs.aws.amazon.com/cdk/latest/guide/environments.html)
-in the CDK Developer Guide).
+An *environment* is an *(account, region)* pair where you want to deploy a
+CDK stack (see
+[Environments](https://docs.aws.amazon.com/cdk/latest/guide/environments.html)
+in the CDK Developer Guide). In a Continuous Deployment pipeline, there are
+at least two environments involved: the environment where the pipeline is
+provisioned, and the environment where you want to deploy the application (or
+different stages of the application).
These can be the same, though best +practices recommend you isolate your different application stages from each +other in different AWS accounts or regions. Before you can provision the pipeline, you have to *bootstrap* the environment you want to create it in. If you are deploying your application to different environments, you @@ -522,7 +548,7 @@ they are all bootstrapped. Limitations that we are aware of and will address: * **No context queries**: context queries are not supported. That means that - Vpc.fromLookup() and other functions like it will not work. + Vpc.fromLookup() and other functions like it will not work [#8905](https://github.com/aws/aws-cdk/issues/8905). ## Known Issues From 460a86479aeec5a5ff6142f7ad239de00fd5c86c Mon Sep 17 00:00:00 2001 From: Rico Huijbers Date: Mon, 6 Jul 2020 11:54:32 +0200 Subject: [PATCH 4/9] Add dependency to decdk --- packages/decdk/package.json | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/decdk/package.json b/packages/decdk/package.json index 59b0ad777f07e..6e698e76cb08f 100644 --- a/packages/decdk/package.json +++ b/packages/decdk/package.json @@ -28,6 +28,7 @@ "license": "Apache-2.0", "dependencies": { "@aws-cdk/alexa-ask": "0.0.0", + "@aws-cdk/pipelines": "0.0.0", "@aws-cdk/app-delivery": "0.0.0", "@aws-cdk/assets": "0.0.0", "@aws-cdk/aws-accessanalyzer": "0.0.0", From 0b34ed11ed5b1a3aa3fe7d270b3b8737ced9c01e Mon Sep 17 00:00:00 2001 From: Rico Huijbers Date: Mon, 6 Jul 2020 13:30:21 +0200 Subject: [PATCH 5/9] Auto-switch to new-style bootstrapping --- packages/@aws-cdk/pipelines/README.md | 16 +++++++++++++--- packages/aws-cdk/bin/cdk.ts | 14 ++++++++++++-- 2 files changed, 25 insertions(+), 5 deletions(-) diff --git a/packages/@aws-cdk/pipelines/README.md b/packages/@aws-cdk/pipelines/README.md index 048957653aa82..820dcdbdd0c4a 100644 --- a/packages/@aws-cdk/pipelines/README.md +++ b/packages/@aws-cdk/pipelines/README.md @@ -90,9 +90,6 @@ following to `cdk.json`: } ``` -When bootstrapping, the environment variable `CDK_NEW_BOOTSTRAP=1` should be -set (see the section called **CDK Environment Bootstrapping**). - ## Defining the Pipeline (Source and Synth) The pipeline is defined by instantiating `CdkPipeline` in a Stack. This defines the @@ -429,6 +426,19 @@ Before you can provision the pipeline, you have to *bootstrap* the environment y to create it in. If you are deploying your application to different environments, you also have to bootstrap those and be sure to add a *trust* relationship. +> This library requires a newer version of the bootstrapping stack which has +> been updated specifically to support cross-account continous delivery. In the future, +> this new bootstrapping stack will become the default, but for now it is still +> opt-in. +> +> The commands below assume you are running `cdk bootstrap` in a directory +> where `cdk.json` contains the `"@aws-cdk/core:newStyleStackSynthesis": true` +> setting in its context, which will switch to the new bootstrapping stack +> automatically. +> +> If run from another directory, be sure to run the bootstrap command with +> the environment variable `CDK_NEW_BOOTSTRAP=1` set. 
+ To bootstrap an environment for provisioning the pipeline: ``` diff --git a/packages/aws-cdk/bin/cdk.ts b/packages/aws-cdk/bin/cdk.ts index c40c914714187..31383ce04729e 100644 --- a/packages/aws-cdk/bin/cdk.ts +++ b/packages/aws-cdk/bin/cdk.ts @@ -13,7 +13,7 @@ import { execProgram } from '../lib/api/cxapp/exec'; import { CdkToolkit } from '../lib/cdk-toolkit'; import { RequireApproval } from '../lib/diff'; import { availableInitLanguages, cliInit, printAvailableTemplates } from '../lib/init'; -import { data, debug, error, setLogLevel } from '../lib/logging'; +import { data, debug, error, print, setLogLevel } from '../lib/logging'; import { PluginHost } from '../lib/plugin'; import { serializeStructure } from '../lib/serialize'; import { Configuration, Settings } from '../lib/settings'; @@ -227,9 +227,19 @@ async function initCommandLine() { }); case 'bootstrap': + // Use new bootstrapping if it's requested via environment variable, or if + // new style stack synthesis has been configured in `cdk.json`. + let useNewBootstrapping = false; + if (process.env.CDK_NEW_BOOTSTRAP) { + print('CDK_NEW_BOOTSTRAP set, using new-style bootstrapping'); + useNewBootstrapping = true; + } else if (configuration.context.get(cxapi.NEW_STYLE_STACK_SYNTHESIS_CONTEXT)) { + print(`'${cxapi.NEW_STYLE_STACK_SYNTHESIS_CONTEXT}' context set, using new-style bootstrapping`); + } + return await cli.bootstrap(args.ENVIRONMENTS, toolkitStackName, args.roleArn, - !!process.env.CDK_NEW_BOOTSTRAP, + useNewBootstrapping, argv.force, { bucketName: configuration.settings.get(['toolkitBucket', 'bucketName']), From 47831271937e9aee49dcee3d38b0c443dd74e4a3 Mon Sep 17 00:00:00 2001 From: Rico Huijbers Date: Mon, 6 Jul 2020 13:31:36 +0200 Subject: [PATCH 6/9] Add to monocdk --- packages/monocdk-experiment/package.json | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/monocdk-experiment/package.json b/packages/monocdk-experiment/package.json index 71a5dd88067b3..dde22f6a52f6e 100644 --- a/packages/monocdk-experiment/package.json +++ b/packages/monocdk-experiment/package.json @@ -247,6 +247,7 @@ "@aws-cdk/core": "0.0.0", "@aws-cdk/custom-resources": "0.0.0", "@aws-cdk/cx-api": "0.0.0", + "@aws-cdk/pipelines": "0.0.0", "@aws-cdk/region-info": "0.0.0", "@types/fs-extra": "^8.1.1", "@types/node": "^10.17.26", From f5c141e12bfd95e3c7516b1af9b327187f446edc Mon Sep 17 00:00:00 2001 From: Rico Huijbers Date: Wed, 8 Jul 2020 10:11:52 +0200 Subject: [PATCH 7/9] Properly flip flag to true --- packages/aws-cdk/bin/cdk.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/aws-cdk/bin/cdk.ts b/packages/aws-cdk/bin/cdk.ts index 31383ce04729e..79b90682dc5c4 100644 --- a/packages/aws-cdk/bin/cdk.ts +++ b/packages/aws-cdk/bin/cdk.ts @@ -235,6 +235,7 @@ async function initCommandLine() { useNewBootstrapping = true; } else if (configuration.context.get(cxapi.NEW_STYLE_STACK_SYNTHESIS_CONTEXT)) { print(`'${cxapi.NEW_STYLE_STACK_SYNTHESIS_CONTEXT}' context set, using new-style bootstrapping`); + useNewBootstrapping = true; } return await cli.bootstrap(args.ENVIRONMENTS, toolkitStackName, From ad4c1b2c24b7138819a16bb52941718212435724 Mon Sep 17 00:00:00 2001 From: Rico Huijbers Date: Wed, 8 Jul 2020 11:14:42 +0200 Subject: [PATCH 8/9] Fix type tests, it's still to easy to end up with a non-deduped dep tree --- .../@aws-cdk/pipelines/lib/actions/deploy-cdk-stack-action.ts | 4 +++- packages/@aws-cdk/pipelines/lib/stage.ts | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git 
a/packages/@aws-cdk/pipelines/lib/actions/deploy-cdk-stack-action.ts b/packages/@aws-cdk/pipelines/lib/actions/deploy-cdk-stack-action.ts index f4d880802348b..7517a02f35891 100644 --- a/packages/@aws-cdk/pipelines/lib/actions/deploy-cdk-stack-action.ts +++ b/packages/@aws-cdk/pipelines/lib/actions/deploy-cdk-stack-action.ts @@ -356,5 +356,7 @@ export interface FromStackArtifactOptions { } function isStackArtifact(a: cxapi.CloudArtifact): a is cxapi.CloudFormationStackArtifact { - return a instanceof cxapi.CloudFormationStackArtifact; + // instanceof is too risky, and we're at a too late stage to properly fix. + // return a instanceof cxapi.CloudFormationStackArtifact; + return a.constructor.name === 'CloudFormationStackArtifact'; } \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/lib/stage.ts b/packages/@aws-cdk/pipelines/lib/stage.ts index 267bcb9543ef6..2441da072cede 100644 --- a/packages/@aws-cdk/pipelines/lib/stage.ts +++ b/packages/@aws-cdk/pipelines/lib/stage.ts @@ -299,7 +299,9 @@ function stripPrefix(s: string, prefix: string) { } function isAssetManifest(s: cxapi.CloudArtifact): s is cxapi.AssetManifestArtifact { - return s instanceof cxapi.AssetManifestArtifact; + // instanceof is too risky, and we're at a too late stage to properly fix. + // return s instanceof cxapi.AssetManifestArtifact; + return s.constructor.name === 'AssetManifestArtifact'; } /** From be1cf0bd66022990120aa90643900661d1d99d2f Mon Sep 17 00:00:00 2001 From: Rico Huijbers Date: Wed, 8 Jul 2020 11:40:45 +0200 Subject: [PATCH 9/9] Add another troubleshooting tip --- packages/@aws-cdk/pipelines/README.md | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/packages/@aws-cdk/pipelines/README.md b/packages/@aws-cdk/pipelines/README.md index 820dcdbdd0c4a..6b171a06dea98 100644 --- a/packages/@aws-cdk/pipelines/README.md +++ b/packages/@aws-cdk/pipelines/README.md @@ -530,7 +530,7 @@ Here are some common errors you may encounter while using this library. ### Pipeline: Internal Failure -If you see the following error: +If you see the following error during deployment of your pipeline: ``` CREATE_FAILED | AWS::CodePipeline::Pipeline | Pipeline/Pipeline @@ -542,7 +542,7 @@ right permissions to access the repository you're trying to access. ### Key: Policy contains a statement with one or more invalid principals -If you see the following error: +If you see the following error during deployment of your pipeline: ``` CREATE_FAILED | AWS::KMS::Key | Pipeline/Pipeline/ArtifactsBucketEncryptionKey @@ -553,6 +553,19 @@ One of the target (account, region) environments has not been bootstrapped with the new bootstrap stack. Check your target environments and make sure they are all bootstrapped. +### is in ROLLBACK_COMPLETE state and can not be updated. + +If you see the following error during execution of your pipeline: + +``` +Stack ... is in ROLLBACK_COMPLETE state and can not be updated. (Service: +AmazonCloudFormation; Status Code: 400; Error Code: ValidationError; Request +ID: ...) +``` + +The stack failed its previous deployment, and is in a non-retryable state. +Go into the CloudFormation console, delete the stack, and retry the deployment. + ## Current Limitations Limitations that we are aware of and will address: