From d7ddf7564a40f54b490d6dbbb215eeb068eec976 Mon Sep 17 00:00:00 2001
From: "Benjamin E. Coe"
Date: Mon, 6 Apr 2020 15:26:06 -0700
Subject: [PATCH] test: deal with concurrent import (#368)

---
 automl/test/import_dataset.test.js | 66 ++++++++++++++++++++++++++++--
 1 file changed, 62 insertions(+), 4 deletions(-)

diff --git a/automl/test/import_dataset.test.js b/automl/test/import_dataset.test.js
index 89471eac474..e96dee43c23 100644
--- a/automl/test/import_dataset.test.js
+++ b/automl/test/import_dataset.test.js
@@ -15,7 +15,7 @@
 'use strict';
 
 const {assert} = require('chai');
-const {describe, it, before, after} = require('mocha');
+const {describe, it, before} = require('mocha');
 const {AutoMlClient} = require('@google-cloud/automl').v1;
 
 const cp = require('child_process');
@@ -25,12 +25,66 @@ const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'});
 
 const IMPORT_DATASET_REGION_TAG = 'import_dataset';
 const LOCATION = 'us-central1';
+const TWENTY_MINUTES_IN_SECONDS = 60 * 20;
+
+// If two suites of tests are running in parallel, importing and creating
+// datasets can fail with:
+// No other operations should be working on projects/1046198160504/*.
+const delay = async test => {
+  const retries = test.currentRetry();
+  if (retries === 0) return; // no retry on the first failure.
+  // see: https://cloud.google.com/storage/docs/exponential-backoff:
+  const ms = Math.pow(2, retries) * 1000 + Math.random() * 2000;
+  return new Promise(done => {
+    console.info(`retrying "${test.title}" in ${ms}ms`);
+    setTimeout(done, ms);
+  });
+};
 
 describe('Automl Import Dataset Test', () => {
   const client = new AutoMlClient();
   let datasetId;
 
-  before('should create a dataset', async () => {
+  before(async () => {
+    await cleanupOldDatasets();
+  });
+
+  async function cleanupOldDatasets() {
+    const projectId = await client.getProjectId();
+    let request = {
+      parent: client.locationPath(projectId, LOCATION),
+      filter: 'translation_dataset_metadata:*',
+    };
+    const [response] = await client.listDatasets(request);
+    for (const dataset of response) {
+      try {
+        const id = dataset.name
+          .split('/')
+          [dataset.name.split('/').length - 1].split('\n')[0];
+        console.info(`checking dataset ${id}`);
+        if (id.match(/test_[0-9a-f]{8}/)) {
+          console.info(`deleting dataset ${id}`);
+          if (
+            Date.now() / 1000 - dataset.createTime.seconds >
+            TWENTY_MINUTES_IN_SECONDS
+          ) {
+            console.info(`dataset ${id} is greater than 20 minutes old`);
+            request = {
+              name: client.datasetPath(projectId, LOCATION, id),
+            };
+            const [operation] = await client.deleteDataset(request);
+            await operation.promise();
+          }
+        }
+      } catch (err) {
+        console.warn(err);
+      }
+    }
+  }
+
+  it('should create a dataset', async function() {
+    this.retries(5);
+    await delay(this.test);
     const projectId = await client.getProjectId();
     const displayName = `test_${uuid
       .v4()
@@ -53,7 +107,9 @@ describe('Automl Import Dataset Test', () => {
       [response.name.split('/').length - 1].split('\n')[0];
   });
 
-  it('should create, import, and delete a dataset', async () => {
+  it('should import dataset', async function() {
+    this.retries(5);
+    await delay(this.test);
     const projectId = await client.getProjectId();
     const data = `gs://${projectId}-automl-translate/en-ja-short.csv`;
     const import_output = execSync(
@@ -62,7 +118,9 @@ describe('Automl Import Dataset Test', () => {
     assert.match(import_output, /Dataset imported/);
   });
 
-  after('delete created dataset', async () => {
+  it('should delete created dataset', async function() {
+    this.retries(5);
+    await delay(this.test);
     const projectId = await client.getProjectId();
     const request = {
       name: client.datasetPath(projectId, LOCATION, datasetId),
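
A minimal sketch, not part of the patch, of how the retry-plus-backoff pattern above plugs into any Mocha test: `this.retries(5)` makes Mocha re-run a failing test, and the delay helper sleeps (exponential backoff with jitter) before each retry but not before the first attempt. `flakyOperation` is a hypothetical placeholder for the AutoML calls exercised in the real tests.

'use strict';

const {describe, it} = require('mocha');

// Hypothetical stand-in for the AutoML calls in the real tests; it fails
// transiently to show why retries are needed.
const flakyOperation = async () => {
  if (Math.random() < 0.5) throw new Error('concurrent operation conflict');
};

// Same shape as the delay helper in the patch: no sleep on the first run,
// then an exponentially growing wait with random jitter on each retry.
const delay = async test => {
  const retries = test.currentRetry();
  if (retries === 0) return;
  const ms = Math.pow(2, retries) * 1000 + Math.random() * 2000;
  return new Promise(resolve => setTimeout(resolve, ms));
};

describe('a flaky integration', () => {
  it('eventually succeeds', async function() {
    this.retries(5); // Mocha re-runs this test up to 5 more times on failure
    await delay(this.test); // back off before each retry
    await flakyOperation();
  });
});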