From f171820d8eb28fe626b6b8b980723c03ea049bd4 Mon Sep 17 00:00:00 2001 From: Eric Schmidt Date: Tue, 5 Jan 2021 17:16:51 -0800 Subject: [PATCH 1/6] feat: adds cleaner utility function --- samples/test/clean.js | 246 ++++++++++++++++++ ...ning-pipeline-image-classification.test.js | 8 +- 2 files changed, 253 insertions(+), 1 deletion(-) create mode 100644 samples/test/clean.js diff --git a/samples/test/clean.js b/samples/test/clean.js new file mode 100644 index 00000000..04166b7c --- /dev/null +++ b/samples/test/clean.js @@ -0,0 +1,246 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * This module contains utility functions for removing unneeded, stale, or + * orphaned resources from test project. + * + * Removes: + * - Datasets + * - Training pipelines + * - Models + * - Endpoints + * - Batch prediction jobs + */ +const MAXIMUM_AGE = 3600000 * 24 * 2; // 2 days in milliseconds +const MAXIMUM_NUMBER_OF_DELETIONS = 10; // Avoid hitting LRO quota +const TEMP_RESOURCE_PREFIX = 'temp'; +const LOCATION = 'us-central1'; + +// All AI Platform resources need to specify a hostname. +const clientOptions = { + apiEndpoint: 'us-central1-aiplatform.googleapis.com', +}; + +/** + * Determines whether a resource should be deleted based upon its + * age and name. + * @param {string} displayName the display name of the resource + * @param {Timestamp} createTime when the resource is created + * @returns {bool} + */ +function checkDeletionStatus(displayName, createTime) { + const NOW = new Date(); + // Check whether this dataset is a temporary resource + if (displayName.indexOf(TEMP_RESOURCE_PREFIX) == -1) { + return false; + } + + // Check how old the dataset is + const ageOfResource = new Date(createTime.seconds * 1000); + if (NOW - ageOfResource < MAXIMUM_AGE) { + return false; + } + + return true; +} + +/** + * Removes all temporary datasets older than the maximum age. + * @param {string} projectId the project to remove datasets from + * @returns {Promise} + */ +async function cleanDatasets(projectId) { + const {DatasetServiceClient} = require("@google-cloud/aiplatform"); + const datasetServiceClient = new DatasetServiceClient(clientOptions); + + const [datasets] = await datasetServiceClient.listDatasets({ + parent: `projects/${projectId}/locations/${LOCATION}` + }); + + const datasetDeletionOperations = []; + for (const dataset of datasets) { + const {displayName, createTime, name} = dataset; + + if (checkDeletionStatus(displayName, createTime)) { + const deletionOp = datasetServiceClient.deleteDataset({ + name + }) + datasetDeletionOperations.push(deletionOp); + } + + if (datasetDeletionOperations.length > MAXIMUM_NUMBER_OF_DELETIONS) { + break; + } + } + + return Promise.all(datasetDeletionOperations); +} + +/** + * Removes all temporary training pipelines older than the maximum age. 
+ * @param {string} projectId the project to remove pipelines from + * @returns {Promise} + */ +async function cleanTrainingPipelines(projectId) { + const {PipelineServiceClient} = require("@google-cloud/aiplatform"); + const pipelineServiceClient = new PipelineServiceClient(clientOptions); + + const [pipelines] = await pipelineServiceClient.listTrainingPipelines({ + parent: `projects/${projectId}/locations/${LOCATION}` + }); + + const pipelineDeletionOperations = []; + for (const pipeline of pipelines) { + const {displayName, createTime, name} = pipeline; + + if (checkDeletionStatus(displayName, createTime)) { + const deletionOp = pipelineServiceClient.deleteTrainingPipeline({ + name + }) + pipelineDeletionOperations.push(deletionOp); + } + + if (pipelineDeletionOperations.length > MAXIMUM_NUMBER_OF_DELETIONS) { + break; + } + } + return Promise.all(pipelineDeletionOperations); +} + +/** + * Removes all temporary models older than the maximum age. + * @param {string} projectId the project to remove models from + * @returns {Promise} + */ +async function cleanModels(projectId) { + const {ModelServiceClient, EndpointServiceClient} = require("@google-cloud/aiplatform"); + const modelServiceClient = new ModelServiceClient(clientOptions); + + const [models] = await modelServiceClient.listModels({ + parent: `projects/${projectId}/locations/${LOCATION}` + }); + + const modelDeletionOperations = []; + for (const model of models) { + const {displayName, createTime, deployedModels, name} = model; + + if (checkDeletionStatus(displayName, createTime)) { + // Need to check if model is deployed to an endpoint + // Undeploy the model everywhere it is deployed + for (const deployedModel of deployedModels) { + const {endpoint, deployedModelId} = deployedModel; + + const endpointServiceClient = new EndpointServiceClient(clientOptions); + await endpointServiceClient.undeployModel({ + endpoint, + deployedModelId + }); + } + + const deletionOp = modelServiceClient.deleteModel({ + name + }); + modelDeletionOperations.push(deletionOp); + } + + if (modelDeletionOperations.length > MAXIMUM_NUMBER_OF_DELETIONS) { + break; + } + } + return Promise.all(modelDeletionOperations); +} + +/** + * Removes all temporary endpoints older than the maximum age. 
+ * @param {string} projectId the project to remove endpoints from + * @returns {Promise} + */ +async function cleanEndpoints(projectId) { + const {EndpointServiceClient} = require("@google-cloud/aiplatform"); + const endpointServiceClient = new EndpointServiceClient(clientOptions); + + const [endpoints] = await endpointServiceClient.listEndpoints({ + parent: `projects/${projectId}/locations/${LOCATION}` + }); + + const endpointDeletionOperations = []; + for (const endpoint of endpoints) { + const {displayName, createTime, name} = endpoint; + + if (checkDeletionStatus(displayName, createTime)) { + const deletionOp = endpointServiceClient.deleteEndpoint({ + name + }); + endpointDeletionOperations.push(deletionOp); + } + + if (endpointDeletionOperations.length > MAXIMUM_NUMBER_OF_DELETIONS) { + break; + } + } + return Promise.all(endpointDeletionOperations); +} + +/** + * Removes all temporary batch prediction jobs + * @param {string} projectId the project to remove prediction jobs from + * @returns {Promise} + */ +async function cleanBatchPredictionJobs(projectId) { + const {JobServiceClient} = require("@google-cloud/aiplatform"); + const jobServiceClient = new JobServiceClient(clientOptions); + + const [batchPredictionJobs] = await jobServiceClient.listBatchPredictionJobs({ + parent: `projects/${projectId}/locations/${LOCATION}` + }); + + const predictionJobDeletionOperations = []; + for (const job of batchPredictionJobs) { + const {displayName, createTime, name} = job; + if (checkDeletionStatus(displayName, createTime)) { + const deletionOp = jobServiceClient.deleteBatchPredictionJob({ + name + }); + predictionJobDeletionOperations.push(deletionOp); + } + + if (predictionJobDeletionOperations.length > MAXIMUM_NUMBER_OF_DELETIONS) { + break; + } + } + return Promise.all(predictionJobDeletionOperations); +} + +/** + * Removes all of the temporary resources older than the maximum age. 
+ * @param {string} projectId the project to remove resources from + * @returns {Promise} + */ +async function cleanAll(projectId) { + await cleanDatasets(projectId); + await cleanTrainingPipelines(projectId); + await cleanModels(projectId); + await cleanEndpoints(projectId); + await cleanBatchPredictionJobs(projectId); +} + +module.exports = { + cleanAll: cleanAll, + cleanDatasets: cleanDatasets, + cleanTrainingPipelines: cleanTrainingPipelines, + cleanModels: cleanModels, + cleanEndpoints: cleanEndpoints, + cleanBatchPredictionJobs: cleanBatchPredictionJobs +}; \ No newline at end of file diff --git a/samples/test/create-training-pipeline-image-classification.test.js b/samples/test/create-training-pipeline-image-classification.test.js index c6c506e0..29a19e36 100644 --- a/samples/test/create-training-pipeline-image-classification.test.js +++ b/samples/test/create-training-pipeline-image-classification.test.js @@ -17,7 +17,8 @@ 'use strict'; const {assert} = require('chai'); -const {after, describe, it} = require('mocha'); +const {after, before, describe, it} = require('mocha'); +const clean = require("./clean"); const uuid = require('uuid').v4; const cp = require('child_process'); @@ -41,6 +42,11 @@ const location = process.env.LOCATION; let trainingPipelineId; describe('AI platform create training pipeline image classification', () => { + + before('should delete any old and/or orphaned resources', async () => { + await clean.cleanTrainingPipelines(project); + }); + it('should create a new image classification training pipeline', async () => { const stdout = execSync( `node ./create-training-pipeline-image-classification.js ${datasetId} ${modelDisplayName} ${trainingPipelineDisplayName} ${project} ${location}` From 5614a9621626b4275a6c4f730b47f995fb21ebba Mon Sep 17 00:00:00 2001 From: Eric Schmidt Date: Tue, 5 Jan 2021 17:18:03 -0800 Subject: [PATCH 2/6] fix: lint --- samples/test/clean.js | 299 +++++++++--------- ...ning-pipeline-image-classification.test.js | 3 +- 2 files changed, 152 insertions(+), 150 deletions(-) diff --git a/samples/test/clean.js b/samples/test/clean.js index 04166b7c..c6854cf8 100644 --- a/samples/test/clean.js +++ b/samples/test/clean.js @@ -15,7 +15,7 @@ /** * This module contains utility functions for removing unneeded, stale, or * orphaned resources from test project. - * + * * Removes: * - Datasets * - Training pipelines @@ -30,7 +30,7 @@ const LOCATION = 'us-central1'; // All AI Platform resources need to specify a hostname. 
const clientOptions = { - apiEndpoint: 'us-central1-aiplatform.googleapis.com', + apiEndpoint: 'us-central1-aiplatform.googleapis.com', }; /** @@ -41,19 +41,19 @@ const clientOptions = { * @returns {bool} */ function checkDeletionStatus(displayName, createTime) { - const NOW = new Date(); - // Check whether this dataset is a temporary resource - if (displayName.indexOf(TEMP_RESOURCE_PREFIX) == -1) { - return false; - } - - // Check how old the dataset is - const ageOfResource = new Date(createTime.seconds * 1000); - if (NOW - ageOfResource < MAXIMUM_AGE) { - return false; - } - - return true; + const NOW = new Date(); + // Check whether this dataset is a temporary resource + if (displayName.indexOf(TEMP_RESOURCE_PREFIX) == -1) { + return false; + } + + // Check how old the dataset is + const ageOfResource = new Date(createTime.seconds * 1000); + if (NOW - ageOfResource < MAXIMUM_AGE) { + return false; + } + + return true; } /** @@ -62,30 +62,30 @@ function checkDeletionStatus(displayName, createTime) { * @returns {Promise} */ async function cleanDatasets(projectId) { - const {DatasetServiceClient} = require("@google-cloud/aiplatform"); - const datasetServiceClient = new DatasetServiceClient(clientOptions); - - const [datasets] = await datasetServiceClient.listDatasets({ - parent: `projects/${projectId}/locations/${LOCATION}` - }); - - const datasetDeletionOperations = []; - for (const dataset of datasets) { - const {displayName, createTime, name} = dataset; - - if (checkDeletionStatus(displayName, createTime)) { - const deletionOp = datasetServiceClient.deleteDataset({ - name - }) - datasetDeletionOperations.push(deletionOp); - } - - if (datasetDeletionOperations.length > MAXIMUM_NUMBER_OF_DELETIONS) { - break; - } + const {DatasetServiceClient} = require('@google-cloud/aiplatform'); + const datasetServiceClient = new DatasetServiceClient(clientOptions); + + const [datasets] = await datasetServiceClient.listDatasets({ + parent: `projects/${projectId}/locations/${LOCATION}`, + }); + + const datasetDeletionOperations = []; + for (const dataset of datasets) { + const {displayName, createTime, name} = dataset; + + if (checkDeletionStatus(displayName, createTime)) { + const deletionOp = datasetServiceClient.deleteDataset({ + name, + }); + datasetDeletionOperations.push(deletionOp); } - return Promise.all(datasetDeletionOperations); + if (datasetDeletionOperations.length > MAXIMUM_NUMBER_OF_DELETIONS) { + break; + } + } + + return Promise.all(datasetDeletionOperations); } /** @@ -94,29 +94,29 @@ async function cleanDatasets(projectId) { * @returns {Promise} */ async function cleanTrainingPipelines(projectId) { - const {PipelineServiceClient} = require("@google-cloud/aiplatform"); - const pipelineServiceClient = new PipelineServiceClient(clientOptions); - - const [pipelines] = await pipelineServiceClient.listTrainingPipelines({ - parent: `projects/${projectId}/locations/${LOCATION}` - }); - - const pipelineDeletionOperations = []; - for (const pipeline of pipelines) { - const {displayName, createTime, name} = pipeline; - - if (checkDeletionStatus(displayName, createTime)) { - const deletionOp = pipelineServiceClient.deleteTrainingPipeline({ - name - }) - pipelineDeletionOperations.push(deletionOp); - } - - if (pipelineDeletionOperations.length > MAXIMUM_NUMBER_OF_DELETIONS) { - break; - } + const {PipelineServiceClient} = require('@google-cloud/aiplatform'); + const pipelineServiceClient = new PipelineServiceClient(clientOptions); + + const [pipelines] = await 
pipelineServiceClient.listTrainingPipelines({ + parent: `projects/${projectId}/locations/${LOCATION}`, + }); + + const pipelineDeletionOperations = []; + for (const pipeline of pipelines) { + const {displayName, createTime, name} = pipeline; + + if (checkDeletionStatus(displayName, createTime)) { + const deletionOp = pipelineServiceClient.deleteTrainingPipeline({ + name, + }); + pipelineDeletionOperations.push(deletionOp); + } + + if (pipelineDeletionOperations.length > MAXIMUM_NUMBER_OF_DELETIONS) { + break; } - return Promise.all(pipelineDeletionOperations); + } + return Promise.all(pipelineDeletionOperations); } /** @@ -125,41 +125,44 @@ async function cleanTrainingPipelines(projectId) { * @returns {Promise} */ async function cleanModels(projectId) { - const {ModelServiceClient, EndpointServiceClient} = require("@google-cloud/aiplatform"); - const modelServiceClient = new ModelServiceClient(clientOptions); - - const [models] = await modelServiceClient.listModels({ - parent: `projects/${projectId}/locations/${LOCATION}` - }); - - const modelDeletionOperations = []; - for (const model of models) { - const {displayName, createTime, deployedModels, name} = model; - - if (checkDeletionStatus(displayName, createTime)) { - // Need to check if model is deployed to an endpoint - // Undeploy the model everywhere it is deployed - for (const deployedModel of deployedModels) { - const {endpoint, deployedModelId} = deployedModel; - - const endpointServiceClient = new EndpointServiceClient(clientOptions); - await endpointServiceClient.undeployModel({ - endpoint, - deployedModelId - }); - } - - const deletionOp = modelServiceClient.deleteModel({ - name - }); - modelDeletionOperations.push(deletionOp); - } - - if (modelDeletionOperations.length > MAXIMUM_NUMBER_OF_DELETIONS) { - break; - } + const { + ModelServiceClient, + EndpointServiceClient, + } = require('@google-cloud/aiplatform'); + const modelServiceClient = new ModelServiceClient(clientOptions); + + const [models] = await modelServiceClient.listModels({ + parent: `projects/${projectId}/locations/${LOCATION}`, + }); + + const modelDeletionOperations = []; + for (const model of models) { + const {displayName, createTime, deployedModels, name} = model; + + if (checkDeletionStatus(displayName, createTime)) { + // Need to check if model is deployed to an endpoint + // Undeploy the model everywhere it is deployed + for (const deployedModel of deployedModels) { + const {endpoint, deployedModelId} = deployedModel; + + const endpointServiceClient = new EndpointServiceClient(clientOptions); + await endpointServiceClient.undeployModel({ + endpoint, + deployedModelId, + }); + } + + const deletionOp = modelServiceClient.deleteModel({ + name, + }); + modelDeletionOperations.push(deletionOp); } - return Promise.all(modelDeletionOperations); + + if (modelDeletionOperations.length > MAXIMUM_NUMBER_OF_DELETIONS) { + break; + } + } + return Promise.all(modelDeletionOperations); } /** @@ -168,29 +171,29 @@ async function cleanModels(projectId) { * @returns {Promise} */ async function cleanEndpoints(projectId) { - const {EndpointServiceClient} = require("@google-cloud/aiplatform"); - const endpointServiceClient = new EndpointServiceClient(clientOptions); - - const [endpoints] = await endpointServiceClient.listEndpoints({ - parent: `projects/${projectId}/locations/${LOCATION}` - }); - - const endpointDeletionOperations = []; - for (const endpoint of endpoints) { - const {displayName, createTime, name} = endpoint; - - if (checkDeletionStatus(displayName, createTime)) 
{ - const deletionOp = endpointServiceClient.deleteEndpoint({ - name - }); - endpointDeletionOperations.push(deletionOp); - } - - if (endpointDeletionOperations.length > MAXIMUM_NUMBER_OF_DELETIONS) { - break; - } + const {EndpointServiceClient} = require('@google-cloud/aiplatform'); + const endpointServiceClient = new EndpointServiceClient(clientOptions); + + const [endpoints] = await endpointServiceClient.listEndpoints({ + parent: `projects/${projectId}/locations/${LOCATION}`, + }); + + const endpointDeletionOperations = []; + for (const endpoint of endpoints) { + const {displayName, createTime, name} = endpoint; + + if (checkDeletionStatus(displayName, createTime)) { + const deletionOp = endpointServiceClient.deleteEndpoint({ + name, + }); + endpointDeletionOperations.push(deletionOp); + } + + if (endpointDeletionOperations.length > MAXIMUM_NUMBER_OF_DELETIONS) { + break; } - return Promise.all(endpointDeletionOperations); + } + return Promise.all(endpointDeletionOperations); } /** @@ -199,28 +202,28 @@ async function cleanEndpoints(projectId) { * @returns {Promise} */ async function cleanBatchPredictionJobs(projectId) { - const {JobServiceClient} = require("@google-cloud/aiplatform"); - const jobServiceClient = new JobServiceClient(clientOptions); - - const [batchPredictionJobs] = await jobServiceClient.listBatchPredictionJobs({ - parent: `projects/${projectId}/locations/${LOCATION}` - }); - - const predictionJobDeletionOperations = []; - for (const job of batchPredictionJobs) { - const {displayName, createTime, name} = job; - if (checkDeletionStatus(displayName, createTime)) { - const deletionOp = jobServiceClient.deleteBatchPredictionJob({ - name - }); - predictionJobDeletionOperations.push(deletionOp); - } - - if (predictionJobDeletionOperations.length > MAXIMUM_NUMBER_OF_DELETIONS) { - break; - } + const {JobServiceClient} = require('@google-cloud/aiplatform'); + const jobServiceClient = new JobServiceClient(clientOptions); + + const [batchPredictionJobs] = await jobServiceClient.listBatchPredictionJobs({ + parent: `projects/${projectId}/locations/${LOCATION}`, + }); + + const predictionJobDeletionOperations = []; + for (const job of batchPredictionJobs) { + const {displayName, createTime, name} = job; + if (checkDeletionStatus(displayName, createTime)) { + const deletionOp = jobServiceClient.deleteBatchPredictionJob({ + name, + }); + predictionJobDeletionOperations.push(deletionOp); } - return Promise.all(predictionJobDeletionOperations); + + if (predictionJobDeletionOperations.length > MAXIMUM_NUMBER_OF_DELETIONS) { + break; + } + } + return Promise.all(predictionJobDeletionOperations); } /** @@ -229,18 +232,18 @@ async function cleanBatchPredictionJobs(projectId) { * @returns {Promise} */ async function cleanAll(projectId) { - await cleanDatasets(projectId); - await cleanTrainingPipelines(projectId); - await cleanModels(projectId); - await cleanEndpoints(projectId); - await cleanBatchPredictionJobs(projectId); + await cleanDatasets(projectId); + await cleanTrainingPipelines(projectId); + await cleanModels(projectId); + await cleanEndpoints(projectId); + await cleanBatchPredictionJobs(projectId); } module.exports = { - cleanAll: cleanAll, - cleanDatasets: cleanDatasets, - cleanTrainingPipelines: cleanTrainingPipelines, - cleanModels: cleanModels, - cleanEndpoints: cleanEndpoints, - cleanBatchPredictionJobs: cleanBatchPredictionJobs -}; \ No newline at end of file + cleanAll: cleanAll, + cleanDatasets: cleanDatasets, + cleanTrainingPipelines: cleanTrainingPipelines, + cleanModels: 
cleanModels, + cleanEndpoints: cleanEndpoints, + cleanBatchPredictionJobs: cleanBatchPredictionJobs, +}; diff --git a/samples/test/create-training-pipeline-image-classification.test.js b/samples/test/create-training-pipeline-image-classification.test.js index 29a19e36..2eff3c33 100644 --- a/samples/test/create-training-pipeline-image-classification.test.js +++ b/samples/test/create-training-pipeline-image-classification.test.js @@ -18,7 +18,7 @@ const {assert} = require('chai'); const {after, before, describe, it} = require('mocha'); -const clean = require("./clean"); +const clean = require('./clean'); const uuid = require('uuid').v4; const cp = require('child_process'); @@ -42,7 +42,6 @@ const location = process.env.LOCATION; let trainingPipelineId; describe('AI platform create training pipeline image classification', () => { - before('should delete any old and/or orphaned resources', async () => { await clean.cleanTrainingPipelines(project); }); From 3b56bacb87d8547eedd03cb4dfbf1de0feca5262 Mon Sep 17 00:00:00 2001 From: Eric Schmidt Date: Tue, 5 Jan 2021 17:21:00 -0800 Subject: [PATCH 3/6] fix: typo --- samples/test/clean.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/samples/test/clean.js b/samples/test/clean.js index c6854cf8..298f001b 100644 --- a/samples/test/clean.js +++ b/samples/test/clean.js @@ -42,12 +42,12 @@ const clientOptions = { */ function checkDeletionStatus(displayName, createTime) { const NOW = new Date(); - // Check whether this dataset is a temporary resource + // Check whether this resources is a temporary resource if (displayName.indexOf(TEMP_RESOURCE_PREFIX) == -1) { return false; } - // Check how old the dataset is + // Check how old the resource is const ageOfResource = new Date(createTime.seconds * 1000); if (NOW - ageOfResource < MAXIMUM_AGE) { return false; From c68c68b0acacea09881c517742dfb78e01181cd4 Mon Sep 17 00:00:00 2001 From: Eric Schmidt Date: Wed, 6 Jan 2021 09:10:56 -0800 Subject: [PATCH 4/6] fix: lint --- samples/test/clean.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/test/clean.js b/samples/test/clean.js index 298f001b..7d6ed7a6 100644 --- a/samples/test/clean.js +++ b/samples/test/clean.js @@ -43,7 +43,7 @@ const clientOptions = { function checkDeletionStatus(displayName, createTime) { const NOW = new Date(); // Check whether this resources is a temporary resource - if (displayName.indexOf(TEMP_RESOURCE_PREFIX) == -1) { + if (displayName.indexOf(TEMP_RESOURCE_PREFIX) === -1) { return false; } From 8de7adc2e6d851da75c6e2775ec3d4043eda5149 Mon Sep 17 00:00:00 2001 From: Eric Schmidt Date: Thu, 7 Jan 2021 11:36:31 -0800 Subject: [PATCH 5/6] fix: per reviewer --- samples/test/clean.js | 46 +++++-------------------------------------- 1 file changed, 5 insertions(+), 41 deletions(-) diff --git a/samples/test/clean.js b/samples/test/clean.js index 7d6ed7a6..dfbd00a2 100644 --- a/samples/test/clean.js +++ b/samples/test/clean.js @@ -69,23 +69,15 @@ async function cleanDatasets(projectId) { parent: `projects/${projectId}/locations/${LOCATION}`, }); - const datasetDeletionOperations = []; for (const dataset of datasets) { const {displayName, createTime, name} = dataset; if (checkDeletionStatus(displayName, createTime)) { - const deletionOp = datasetServiceClient.deleteDataset({ + await datasetServiceClient.deleteDataset({ name, }); - datasetDeletionOperations.push(deletionOp); - } - - if (datasetDeletionOperations.length > MAXIMUM_NUMBER_OF_DELETIONS) { - break; } } - - return 
Promise.all(datasetDeletionOperations); } /** @@ -101,22 +93,15 @@ async function cleanTrainingPipelines(projectId) { parent: `projects/${projectId}/locations/${LOCATION}`, }); - const pipelineDeletionOperations = []; for (const pipeline of pipelines) { const {displayName, createTime, name} = pipeline; if (checkDeletionStatus(displayName, createTime)) { - const deletionOp = pipelineServiceClient.deleteTrainingPipeline({ + await pipelineServiceClient.deleteTrainingPipeline({ name, }); - pipelineDeletionOperations.push(deletionOp); - } - - if (pipelineDeletionOperations.length > MAXIMUM_NUMBER_OF_DELETIONS) { - break; } } - return Promise.all(pipelineDeletionOperations); } /** @@ -135,7 +120,6 @@ async function cleanModels(projectId) { parent: `projects/${projectId}/locations/${LOCATION}`, }); - const modelDeletionOperations = []; for (const model of models) { const {displayName, createTime, deployedModels, name} = model; @@ -152,17 +136,11 @@ async function cleanModels(projectId) { }); } - const deletionOp = modelServiceClient.deleteModel({ + await modelServiceClient.deleteModel({ name, }); - modelDeletionOperations.push(deletionOp); - } - - if (modelDeletionOperations.length > MAXIMUM_NUMBER_OF_DELETIONS) { - break; } } - return Promise.all(modelDeletionOperations); } /** @@ -178,22 +156,15 @@ async function cleanEndpoints(projectId) { parent: `projects/${projectId}/locations/${LOCATION}`, }); - const endpointDeletionOperations = []; for (const endpoint of endpoints) { const {displayName, createTime, name} = endpoint; if (checkDeletionStatus(displayName, createTime)) { - const deletionOp = endpointServiceClient.deleteEndpoint({ + await endpointServiceClient.deleteEndpoint({ name, }); - endpointDeletionOperations.push(deletionOp); - } - - if (endpointDeletionOperations.length > MAXIMUM_NUMBER_OF_DELETIONS) { - break; } } - return Promise.all(endpointDeletionOperations); } /** @@ -209,21 +180,14 @@ async function cleanBatchPredictionJobs(projectId) { parent: `projects/${projectId}/locations/${LOCATION}`, }); - const predictionJobDeletionOperations = []; for (const job of batchPredictionJobs) { const {displayName, createTime, name} = job; if (checkDeletionStatus(displayName, createTime)) { - const deletionOp = jobServiceClient.deleteBatchPredictionJob({ + await jobServiceClient.deleteBatchPredictionJob({ name, }); - predictionJobDeletionOperations.push(deletionOp); - } - - if (predictionJobDeletionOperations.length > MAXIMUM_NUMBER_OF_DELETIONS) { - break; } } - return Promise.all(predictionJobDeletionOperations); } /** From 8ddf5161c831738c2f217f080362c38050bc95f9 Mon Sep 17 00:00:00 2001 From: Eric Schmidt Date: Thu, 7 Jan 2021 11:51:41 -0800 Subject: [PATCH 6/6] fix: lint --- samples/test/clean.js | 1 - 1 file changed, 1 deletion(-) diff --git a/samples/test/clean.js b/samples/test/clean.js index dfbd00a2..458d9895 100644 --- a/samples/test/clean.js +++ b/samples/test/clean.js @@ -24,7 +24,6 @@ * - Batch prediction jobs */ const MAXIMUM_AGE = 3600000 * 24 * 2; // 2 days in milliseconds -const MAXIMUM_NUMBER_OF_DELETIONS = 10; // Avoid hitting LRO quota const TEMP_RESOURCE_PREFIX = 'temp'; const LOCATION = 'us-central1';
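
---

The cleaner above is wired into the test suite through a Mocha `before` hook, but it can also be run directly when sweeping a test project by hand. The standalone driver below is a minimal sketch and not part of this patch series: the script name, the relative path to clean.js, and the CLOUD_PROJECT environment variable are assumptions; only the exported `cleanAll` function from the module above is used.

    // run-clean.js — hypothetical driver for the cleanup utility (not part of these patches).
    // Assumes CLOUD_PROJECT holds the test project ID and that clean.js sits at the path below.
    'use strict';

    const clean = require('./samples/test/clean');

    async function main() {
      const projectId = process.env.CLOUD_PROJECT;
      if (!projectId) {
        throw new Error('Set CLOUD_PROJECT to the ID of the test project to clean.');
      }
      // Removes stale temp datasets, training pipelines, models, endpoints,
      // and batch prediction jobs, in that order.
      await clean.cleanAll(projectId);
      console.log(`Finished cleaning temporary resources in ${projectId}`);
    }

    main().catch(err => {
      console.error(err);
      process.exitCode = 1;
    });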
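Patch 5 replaces the batched Promise.all deletions with a sequential `await` per resource, and patch 6 then drops the now-unused MAXIMUM_NUMBER_OF_DELETIONS constant. If the long-running-operation quota concern that constant guarded against resurfaces, a per-run cap can be restored under the sequential pattern. The variant below is a sketch only: it is assumed to live in clean.js next to `clientOptions`, `LOCATION`, and `checkDeletionStatus`, and the cap value and function name are hypothetical.

    // Hypothetical capped variant of cleanDatasets; assumes it is defined in clean.js
    // so that clientOptions, LOCATION, and checkDeletionStatus are in scope.
    const MAX_DELETIONS_PER_RUN = 10; // assumed ceiling to stay under LRO quota

    async function cleanDatasetsWithCap(projectId) {
      const {DatasetServiceClient} = require('@google-cloud/aiplatform');
      const datasetServiceClient = new DatasetServiceClient(clientOptions);

      const [datasets] = await datasetServiceClient.listDatasets({
        parent: `projects/${projectId}/locations/${LOCATION}`,
      });

      let deletions = 0;
      for (const dataset of datasets) {
        const {displayName, createTime, name} = dataset;
        if (!checkDeletionStatus(displayName, createTime)) {
          continue;
        }
        await datasetServiceClient.deleteDataset({name});
        deletions += 1;
        if (deletions >= MAX_DELETIONS_PER_RUN) {
          // Stop early so a single run never issues more deletion LROs than intended.
          break;
        }
      }
    }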