-
Notifications
You must be signed in to change notification settings - Fork 1
/
build_deploy_test.yaml
130 lines (117 loc) · 5.47 KB
/
build_deploy_test.yaml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
# Copyright 2020 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
steps:
# NOTE(review): the curl builder image may not ship the gcloud CLI that this
# script depends on — confirm, or switch to the cloud-sdk image used below.
- name: "gcr.io/cloud-builders/curl"
  entrypoint: /bin/bash
  args:
  - -c
  - |
    # Compute the "build_" environment variables shared by every later step
    # and persist them on the /workspace volume so those steps can `source` them.
    mkdir -p /workspace/build-utils &&
    export build_repo_name="cicd" &&
    export build_branch_name="test" &&
    export build_bazel_workspace=/workspace/${build_repo_name} &&
    export build_staging_gcp_project_id="$(gcloud config list --format 'value(core.project)')" &&
    # Double quotes (not single) so the project id actually expands inside the
    # command substitution, and `projectNumber` is the real field name of
    # `gcloud projects describe` (was `build_staging_project_number`).
    export build_staging_project_number="$(gcloud projects describe "${build_staging_gcp_project_id}" --format='get(projectNumber)')" &&
    export build_staging_composer_env_name="staging-data-pipeline-composer3" &&
    export build_staging_composer_region="us-central1" &&
    # Was ${build_composer_region}, which is never defined in this file.
    export build_result_bucket_region="${build_staging_composer_region}" &&
    export build_composer_zone_id="us-central1-a" &&
    # Both describe calls below need `$` expansion — the env name and region
    # were previously passed to gcloud as the literal variable names.
    export build_staging_composer_dag_bucket=$(gcloud composer environments describe ${build_staging_composer_env_name} --location ${build_staging_composer_region} --format='get(config.dagGcsPrefix)') &&
    export build_staging_composer_service_account=$(gcloud composer environments describe ${build_staging_composer_env_name} --location $build_staging_composer_region --format='get(config.nodeConfig.serviceAccount)') &&
    export build_composer_dag_name_test="test_lor_game_event" &&
    export build_artifactbucket="gs://lor-data-platform-dev-gouri/staging/game-event/" &&
    export build_artifactdir="${build_bazel_workspace}/bazel-bin/etls/evaluation" &&
    export build_artifactrepo="artifact-repo" &&
    export build_artifact_repo_region="us-central1" &&
    # write all "build_" variables to the persistent volume "/workspace"
    env | grep "^build_" > /workspace/build-utils/set_vars
  id: "source-environment-variables"
# Fetch the source so later steps can reference scripts inside the repo.
- id: check-out-source-code
  name: gcr.io/google.com/cloudsdktool/cloud-sdk:latest
  entrypoint: /bin/bash
  args:
    - -c
    - |
      # Restore the shared "build_" variables persisted by the first step,
      # then clone only the branch under test into /workspace/<repo>.
      source /workspace/build-utils/set_vars &&
      git clone --single-branch --branch $build_branch_name https://github.com/pupamanyu/$build_repo_name
# Build the deployable artifact with Bazel.
# NOTE(review): this runs in /workspace while the repo is cloned to
# /workspace/cicd — confirm whether a `dir: cicd` key is needed for Bazel
# to find the WORKSPACE file.
- id: build-debian-package
  name: gcr.io/cloud-builders/bazel:latest
  args:
    - build
    - //etls/evaluation:game-event
    - --verbose_failures
# Publish the Bazel build output to the staging bucket / artifact repo.
- id: publish-artifact
  name: gcr.io/google.com/cloudsdktool/cloud-sdk:latest
  entrypoint: /bin/bash
  args:
    - -c
    - |
      # Restore the shared "build_" variables, then delegate to the repo's
      # publish helper with the workspace / bucket / repo coordinates.
      source /workspace/build-utils/set_vars &&
      sh $build_repo_name/build-utils/publish-artifact.sh $build_bazel_workspace $build_artifactbucket $build_artifactdir $build_artifactrepo $build_artifact_repo_region
# Run the unit tests for the custom operator inside an Airflow image.
- id: unit-test-on-operator-code
  name: 'gcr.io/cloud-solutions-images/apache-airflow:1.10'
  # The args form a bash command line (`source ... && python ...`), so the
  # entrypoint must be a shell. With the original `entrypoint: 'python'`,
  # `python -c 'source ...'` would fail with a SyntaxError before any test ran.
  entrypoint: /bin/bash
  args:
    - -c
    - |
      # Read environment variables from disk
      source /workspace/build-utils/set_vars &&
      python $build_repo_name/etls/evaluation/lor/game_event/workflow-dag/test_compare_xcom_maps.py $build_bazel_workspace $build_artifactbucket $build_artifactdir $build_artifactrepo $build_artifact_repo_region
# Load the Airflow/Composer variables the pipeline expects.
- id: set-composer-variables
  name: gcr.io/google.com/cloudsdktool/cloud-sdk:latest
  entrypoint: /bin/bash
  args:
    - -c
    - |
      # Restore the shared "build_" variables, then run the repo's helper
      # that pushes variable definitions into the Composer environment.
      source /workspace/build-utils/set_vars &&
      sh $build_repo_name/build-utils/load_composer_var.sh
# Upload the custom operator module to the Composer DAG bucket.
- id: deploy-custom-operator
  name: gcr.io/cloud-builders/gsutil
  entrypoint: /bin/bash
  args:
    - -c
    - |
      # Read environment variables from disk
      source /workspace/build-utils/set_vars &&
      # Fixes: the stray `&` (which backgrounded the command and then tried to
      # run the variable name as a command) is now `$`, and `sh` on a Python
      # file is replaced by `gsutil cp` — the gsutil image and the step id show
      # the intent is to copy the operator into the DAG bucket, not execute it.
      gsutil cp $build_repo_name/etls/evaluation/lor/game_event/workflow-dag/compare_xcom_maps.py $build_staging_composer_dag_bucket
# Copy the published pipeline artifacts from staging into the DAG bucket.
- id: deploy-processing-pipeline
  name: gcr.io/cloud-builders/gsutil
  entrypoint: /bin/bash
  args:
    - -c
    - |
      # Read environment variables from disk
      source /workspace/build-utils/set_vars &&
      # Must be `gsutil cp`: the bash entrypoint overrides the image's gsutil
      # entrypoint, and plain `cp` cannot read gs:// URLs.
      gsutil cp gs://lor-data-platform-dev-gouri/staging/game-event/* $build_staging_composer_dag_bucket
# Block until Composer has picked up the freshly deployed test DAG.
- id: wait-for-dag-deployed-on-composer
  name: gcr.io/cloud-builders/gcloud
  entrypoint: /bin/bash
  args:
    - -c
    - |
      # Read environment variables from disk
      source /workspace/build-utils/set_vars &&
      # Poll ('6' retries, '20' seconds apart) for the DAG. Fixes: the path was
      # `buildutils` although every other step uses the `build-utils` directory,
      # and the DAG-name argument was missing its `$`.
      sh $build_repo_name/build-utils/wait_for_dag_deployed.sh $build_staging_composer_env_name $build_staging_composer_region $build_composer_dag_name_test '6' '20'
# Kick off a run of the test DAG, tagged with this Cloud Build's id.
- id: trigger-pipeline-execution
  name: gcr.io/cloud-builders/gcloud
  entrypoint: /bin/bash
  args:
    - -c
    - |
      # Read environment variables from disk
      source /workspace/build-utils/set_vars &&
      # Fixes: the command was missing the `gcloud` executable (the entrypoint
      # is bash, so bare `composer` is not a command) and the `$` on the DAG
      # name, which was being passed as a literal string.
      gcloud composer environments run $build_staging_composer_env_name --location $build_staging_composer_region trigger_dag -- $build_composer_dag_name_test --run_id=$BUILD_ID