diff --git a/.github/workflows/storagetransfer.yaml b/.github/workflows/storagetransfer.yaml new file mode 100644 index 0000000000..33bcf594d9 --- /dev/null +++ b/.github/workflows/storagetransfer.yaml @@ -0,0 +1,82 @@ +name: storagetransfer +on: + push: + branches: + - main + paths: + - 'storagetransfer/**' + - '.github/workflows/storagetransfer.yaml' + pull_request: + paths: + - 'storagetransfer/**' + - '.github/workflows/storagetransfer.yaml' + pull_request_target: + types: [labeled] + paths: + - 'storagetransfer/**' + - '.github/workflows/storagetransfer.yaml' + schedule: + - cron: '0 0 * * 0' +jobs: + test: + if: ${{ github.event.action != 'labeled' || github.event.label.name == 'actions:force-run' }} + runs-on: ubuntu-latest + timeout-minutes: 60 + permissions: + contents: 'write' + pull-requests: 'write' + id-token: 'write' + steps: + - uses: actions/checkout@v3.1.0 + with: + ref: ${{github.event.pull_request.head.sha}} + - uses: 'google-github-actions/auth@v1.0.0' + with: + workload_identity_provider: 'projects/1046198160504/locations/global/workloadIdentityPools/github-actions-pool/providers/github-actions-provider' + service_account: 'kokoro-system-test@long-door-651.iam.gserviceaccount.com' + create_credentials_file: 'true' + access_token_lifetime: 600s + - id: secrets + uses: "google-github-actions/get-secretmanager-secrets@v1" + with: + secrets: |- + sts_aws_secret:nodejs-docs-samples-tests/nodejs-docs-samples-storagetransfer-aws + sts_azure_secret:nodejs-docs-samples-tests/nodejs-docs-samples-storagetransfer-azure + - uses: actions/setup-node@v3.5.1 + with: + node-version: 16 + - run: npm install + working-directory: storagetransfer + - run: npm test + working-directory: storagetransfer + env: + AWS_ACCESS_KEY_ID : ${{ fromJSON(steps.secrets.outputs.sts_aws_secret).AccessKeyId }} + AWS_SECRET_ACCESS_KEY: ${{ fromJSON(steps.secrets.outputs.sts_aws_secret).SecretAccessKey }} + AZURE_STORAGE_ACCOUNT: ${{ 
fromJSON(steps.secrets.outputs.sts_azure_secret).StorageAccount }} + AZURE_CONNECTION_STRING: ${{ fromJSON(steps.secrets.outputs.sts_azure_secret).ConnectionString }} + AZURE_SAS_TOKEN: ${{ fromJSON(steps.secrets.outputs.sts_azure_secret).SAS }} + MOCHA_REPORTER_SUITENAME: storagetransfer + MOCHA_REPORTER_OUTPUT: storagetransfer_sponge_log.xml + MOCHA_REPORTER: xunit + - if: ${{ github.event.action == 'labeled' && github.event.label.name == 'actions:force-run' }} + uses: actions/github-script@v6 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + try { + await github.rest.issues.removeLabel({ + name: 'actions:force-run', + owner: 'GoogleCloudPlatform', + repo: 'nodejs-docs-samples', + issue_number: context.payload.pull_request.number + }); + } catch (e) { + if (!e.message.includes('Label does not exist')) { + throw e; + } + } + - if: ${{ github.event_name == 'schedule' && always() }} + run: | + curl https://github.com/googleapis/repo-automation-bots/releases/download/flakybot-1.1.0/flakybot -o flakybot -s -L + chmod +x ./flakybot + ./flakybot --repo GoogleCloudPlatform/nodejs-docs-samples --commit_hash ${{github.sha}} --build_url https://github.com/${{github.repository}}/actions/runs/${{github.run_id}} diff --git a/.github/workflows/workflows.json b/.github/workflows/workflows.json index b224ef1bc2..83f33196b8 100644 --- a/.github/workflows/workflows.json +++ b/.github/workflows/workflows.json @@ -82,6 +82,7 @@ "service-directory/snippets", "secret-manager", "speech", + "storagetransfer", "talent", "texttospeech", "translate", diff --git a/CODEOWNERS b/CODEOWNERS index 750ba5cc87..1c36c90c65 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -47,8 +47,9 @@ functions/memorystore @GoogleCloudPlatform/nodejs-samples-reviewers functions/spanner @jsimonweb @GoogleCloudPlatform/nodejs-samples-reviewers # SoDa teams -/cloud-sql/**/*.js @GoogleCloudPlatform/infra-db-dpes @GoogleCloudPlatform/nodejs-samples-reviewers -/datastore/**/*.js 
@GoogleCloudPlatform/cloud-native-db-dpes @GoogleCloudPlatform/nodejs-samples-reviewers +cloud-sql @GoogleCloudPlatform/infra-db-dpes @GoogleCloudPlatform/nodejs-samples-reviewers +datastore @GoogleCloudPlatform/cloud-native-db-dpes @GoogleCloudPlatform/nodejs-samples-reviewers +storagetransfer @GoogleCloudPlatform/cloud-native-db-dpes @GoogleCloudPlatform/nodejs-samples-reviewers # One-offs composer @leahecole @sofisl @GoogleCloudPlatform/nodejs-samples-reviewers diff --git a/storagetransfer/aws-request.js b/storagetransfer/aws-request.js new file mode 100644 index 0000000000..0cfc9121f9 --- /dev/null +++ b/storagetransfer/aws-request.js @@ -0,0 +1,110 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +async function main( + projectId, + description, + awsSourceBucket, + gcsSinkBucket, + awsAccessKeyId = process.env.AWS_ACCESS_KEY_ID, + awsSecretAccessKey = process.env.AWS_SECRET_ACCESS_KEY +) { + // [START storagetransfer_transfer_from_aws] + + // Imports the Google Cloud client library + const { + StorageTransferServiceClient, + } = require('@google-cloud/storage-transfer'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of the Google Cloud Platform Project that owns the job + // projectId = 'my-project-id' + + // A useful description for your transfer job + // description = 'My transfer job' + + // AWS S3 source bucket name + // awsSourceBucket = 'my-s3-source-bucket' + + // AWS Access Key ID + // awsAccessKeyId = 'AKIA...' + + // AWS Secret Access Key + // awsSecretAccessKey = 'HEAoMK2.../...ku8' + + // Google Cloud Storage destination bucket name + // gcsSinkBucket = 'my-gcs-destination-bucket' + + // Creates a client + const client = new StorageTransferServiceClient(); + + /** + * Creates a one-time transfer job from Amazon S3 to Google Cloud Storage. + */ + async function transferFromS3() { + // Setting the start date and the end date as the same time creates a + // one-time transfer + const now = new Date(); + const oneTimeSchedule = { + day: now.getDate(), + month: now.getMonth() + 1, + year: now.getFullYear(), + }; + + // Runs the request and creates the job + const [transferJob] = await client.createTransferJob({ + transferJob: { + projectId, + description, + status: 'ENABLED', + schedule: { + scheduleStartDate: oneTimeSchedule, + scheduleEndDate: oneTimeSchedule, + }, + transferSpec: { + awsS3DataSource: { + bucketName: awsSourceBucket, + awsAccessKey: { + accessKeyId: awsAccessKeyId, + secretAccessKey: awsSecretAccessKey, + }, + }, + gcsDataSink: { + bucketName: gcsSinkBucket, + }, + }, + }, + }); + + console.log( + `Created and ran a transfer job from '${awsSourceBucket}' to '${gcsSinkBucket}' with name ${transferJob.name}` + ); + } + + transferFromS3(); + // [END storagetransfer_transfer_from_aws] +} + +main(...process.argv.slice(2)); + +process.on('unhandledRejection', err => { + console.error(err); + process.exitCode = 1; +}); diff --git a/storagetransfer/aws-s3-compatible-source-request.js b/storagetransfer/aws-s3-compatible-source-request.js new file mode 100644 index 0000000000..460a32a4ee --- /dev/null +++ 
b/storagetransfer/aws-s3-compatible-source-request.js @@ -0,0 +1,132 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const {protos} = require('@google-cloud/storage-transfer'); +const {AuthMethod, NetworkProtocol, RequestModel} = + protos.google.storagetransfer.v1.S3CompatibleMetadata; + +async function main( + projectId = 'my-project', + sourceAgentPoolName = 'projects/my-project/agentPools/transfer_service_default', + sourceBucketName = 'my-bucket-name', + sourcePath = 'path/to/data/', + gcsSinkBucket = 'my-sink-bucket', + gcsPath = 'path/to/data/', + region = 'us-east-1', + endpoint = 'us-east-1.example.com', + protocol = NetworkProtocol.NETWORK_PROTOCOL_HTTPS, + requestModel = RequestModel.REQUEST_MODEL_VIRTUAL_HOSTED_STYLE, + authMethod = AuthMethod.AUTH_METHOD_AWS_SIGNATURE_V4 +) { + // [START storagetransfer_transfer_from_s3_compatible_source] + + // Imports the Google Cloud client library + const storageTransfer = require('@google-cloud/storage-transfer'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // Useful enums for AWS S3-Compatible Transfers + // const {AuthMethod, NetworkProtocol, RequestModel} = storageTransfer.protos.google.storagetransfer.v1.S3CompatibleMetadata; + + // Your project id + // const projectId = 'my-project'; + + // The agent pool associated with the S3-compatible data source. 
Defaults to the default agent + // const sourceAgentPoolName = 'projects/my-project/agentPools/transfer_service_default'; + + // The S3-compatible bucket name to transfer data from + // const sourceBucketName = "my-bucket-name"; + + // The S3-compatible path (object prefix) to transfer data from + // const sourcePath = "path/to/data/"; + + // The ID of the GCS bucket to transfer data to + // const gcsSinkBucket = "my-sink-bucket"; + + // The GCS path (object prefix) to transfer data to + // const gcsPath = "path/to/data/"; + + // The S3 region of the source bucket + // const region = 'us-east-1'; + + // The S3-compatible endpoint + // const endpoint = "us-east-1.example.com"; + + // The S3-compatible network protocol + // const protocol = NetworkProtocol.NETWORK_PROTOCOL_HTTPS; + + // The S3-compatible request model + // const requestModel = RequestModel.REQUEST_MODEL_VIRTUAL_HOSTED_STYLE; + + // The S3-compatible auth method + // const authMethod = AuthMethod.AUTH_METHOD_AWS_SIGNATURE_V4; + + // Creates a client + const client = new storageTransfer.StorageTransferServiceClient(); + + /** + * Creates a transfer from an AWS S3-compatible source to GCS + */ + async function transferFromS3CompatibleSource() { + // Runs the request and creates the job + const [transferJob] = await client.createTransferJob({ + transferJob: { + projectId, + transferSpec: { + sourceAgentPoolName, + awsS3CompatibleDataSource: { + region, + s3Metadata: { + authMethod, + protocol, + requestModel, + }, + endpoint, + bucketName: sourceBucketName, + path: sourcePath, + }, + gcsDataSink: { + bucketName: gcsSinkBucket, + path: gcsPath, + }, + }, + status: 'ENABLED', + }, + }); + + await client.runTransferJob({ + jobName: transferJob.name, + projectId, + }); + + console.log( + `Created and ran a transfer job from '${sourceBucketName}' to '${gcsSinkBucket}' with name ${transferJob.name}` + ); + } + + transferFromS3CompatibleSource(); + // [END storagetransfer_transfer_from_s3_compatible_source] +} 
+ +main(...process.argv.slice(2)); + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); diff --git a/storagetransfer/azure-request.js b/storagetransfer/azure-request.js new file mode 100644 index 0000000000..16e9bf10b4 --- /dev/null +++ b/storagetransfer/azure-request.js @@ -0,0 +1,110 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +async function main( + projectId, + description, + azureStorageAccount, + azureSourceContainer, + gcsSinkBucket, + azureSASToken = process.env.AZURE_SAS_TOKEN +) { + // [START storagetransfer_transfer_from_azure] + + // Imports the Google Cloud client library + const { + StorageTransferServiceClient, + } = require('@google-cloud/storage-transfer'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of the Google Cloud Platform Project that owns the job + // projectId = 'my-project-id' + + // A useful description for your transfer job + // description = 'My transfer job' + + // Azure Storage Account name + // azureStorageAccount = 'accountname' + + // Azure Storage Account name + // azureSourceContainer = 'my-azure-source-bucket' + + // Azure Shared Access Signature token + // azureSASToken = '?sv=...' 
+ + // Google Cloud Storage destination bucket name + // gcsSinkBucket = 'my-gcs-destination-bucket' + + // Creates a client + const client = new StorageTransferServiceClient(); + + /** + * Creates a one-time transfer job from Azure Blob Storage to Google Cloud Storage. + */ + async function transferFromBlobStorage() { + // Setting the start date and the end date as the same time creates a + // one-time transfer + const now = new Date(); + const oneTimeSchedule = { + day: now.getDate(), + month: now.getMonth() + 1, + year: now.getFullYear(), + }; + + // Runs the request and creates the job + const [transferJob] = await client.createTransferJob({ + transferJob: { + projectId, + description, + status: 'ENABLED', + schedule: { + scheduleStartDate: oneTimeSchedule, + scheduleEndDate: oneTimeSchedule, + }, + transferSpec: { + azureBlobStorageDataSource: { + azureCredentials: { + sasToken: azureSASToken, + }, + container: azureSourceContainer, + storageAccount: azureStorageAccount, + }, + gcsDataSink: { + bucketName: gcsSinkBucket, + }, + }, + }, + }); + + console.log( + `Created and ran a transfer job from '${azureSourceContainer}' to '${gcsSinkBucket}' with name ${transferJob.name}` + ); + } + + transferFromBlobStorage(); + // [END storagetransfer_transfer_from_azure] +} + +main(...process.argv.slice(2)); + +process.on('unhandledRejection', err => { + console.error(err); + process.exitCode = 1; +}); diff --git a/storagetransfer/check-latest-transfer-operation.js b/storagetransfer/check-latest-transfer-operation.js new file mode 100644 index 0000000000..0d9b7f7b89 --- /dev/null +++ b/storagetransfer/check-latest-transfer-operation.js @@ -0,0 +1,67 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +async function main(projectId, jobName) { + // [START storagetransfer_get_latest_transfer_operation] + + // Imports the Google Cloud client library + const { + StorageTransferServiceClient, + } = require('@google-cloud/storage-transfer'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of the Google Cloud Platform Project that owns the job + // projectId = 'my-project-id' + + // Storage Transfer Service job name + // jobName = 'transferJobs/1234567890' + + // Creates a client + const client = new StorageTransferServiceClient(); + + /** + * Checks the latest transfer operation for a given transfer job. 
+ */ + async function checkLatestTransferOperation() { + const [transferJob] = await client.getTransferJob({projectId, jobName}); + + if (transferJob.latestOperationName) { + const [transferOperation] = await client.operationsClient.getOperation({ + name: transferJob.latestOperationName, + }); + + const operation = JSON.stringify(transferOperation, null, 2); + + console.log(`Latest transfer operation for '${jobName}': ${operation}`); + } else { + console.log(`Transfer job '${jobName}' has not ran yet.`); + } + } + + checkLatestTransferOperation(); + // [END storagetransfer_get_latest_transfer_operation] +} + +main(...process.argv.slice(2)); + +process.on('unhandledRejection', err => { + console.error(err); + process.exitCode = 1; +}); diff --git a/storagetransfer/get-transfer-job-with-retries.js b/storagetransfer/get-transfer-job-with-retries.js new file mode 100644 index 0000000000..0d15aa07f8 --- /dev/null +++ b/storagetransfer/get-transfer-job-with-retries.js @@ -0,0 +1,79 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +async function main(projectId, jobName, maxRetryDelayMillis) { + // [START storagetransfer_create_retry_handler] + + // Imports the Google Cloud client library + const { + StorageTransferServiceClient, + } = require('@google-cloud/storage-transfer'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // The ID of the Google Cloud Platform Project that owns the job + // projectId = 'my-project-id' + + // Storage Transfer Service job name + // jobName = 'transferJobs/1234567890' + + // The maximum delay time, in milliseconds, between requests + // maxRetryDelayMillis = 60000 + + // Creates a client + const client = new StorageTransferServiceClient(); + + /** + * Check the latest transfer operation associated with a transfer job + * with retries. + */ + async function getTransferJobWithRetries() { + // Setting the start date and the end date as the same time creates a + // one-time transfer + + const options = { + retry: { + backoffSettings: { + maxRetryDelayMillis, + }, + }, + }; + + const [transferJob] = await client.getTransferJob( + {projectId, jobName}, + options + ); + + console.log( + `Fetched transfer job: ${transferJob.name} with a maximum of ${maxRetryDelayMillis}ms delay time between requests` + ); + } + + getTransferJobWithRetries(); + // [END storagetransfer_create_retry_handler] +} + +const [projectId, jobName, maxRetryDelayMillis] = [...process.argv.slice(2)]; + +main(projectId, jobName, Number.parseInt(maxRetryDelayMillis)); + +process.on('unhandledRejection', err => { + console.error(err); + process.exitCode = 1; +}); diff --git a/storagetransfer/manifest-request.js b/storagetransfer/manifest-request.js new file mode 100644 index 0000000000..a76909c60d --- /dev/null +++ b/storagetransfer/manifest-request.js @@ -0,0 +1,99 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +async function main( + projectId = 'my-project', + sourceAgentPoolName = '', + rootDirectory = '', + gcsSinkBucket = '', + manifestLocation = '' +) { + // [START storagetransfer_manifest_request] + + // Imports the Google Cloud client library + const { + StorageTransferServiceClient, + } = require('@google-cloud/storage-transfer'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // Your project id + // const projectId = 'my-project' + + // The agent pool associated with the POSIX data source. Defaults to the default agent + // const sourceAgentPoolName = 'projects/my-project/agentPools/transfer_service_default' + + // The root directory path on the source filesystem + // const rootDirectory = '/directory/to/transfer/source' + + // The ID of the GCS bucket to transfer data to + // const gcsSinkBucket = 'my-sink-bucket' + + // Transfer manifest location. 
Must be a `gs:` URL + // const manifestLocation = 'gs://my-bucket/sample_manifest.csv' + + // Creates a client + const client = new StorageTransferServiceClient(); + + /** + * Creates a request to transfer from the local file system to the sink bucket + */ + async function transferViaManifest() { + const createRequest = { + transferJob: { + projectId, + transferSpec: { + sourceAgentPoolName, + posixDataSource: { + rootDirectory, + }, + gcsDataSink: {bucketName: gcsSinkBucket}, + transferManifest: { + location: manifestLocation, + }, + }, + status: 'ENABLED', + }, + }; + + // Runs the request and creates the job + const [transferJob] = await client.createTransferJob(createRequest); + + const runRequest = { + jobName: transferJob.name, + projectId: projectId, + }; + + await client.runTransferJob(runRequest); + + console.log( + `Created and ran a transfer job from '${rootDirectory}' to '${gcsSinkBucket}' using manifest \`${manifestLocation}\` with name ${transferJob.name}` + ); + } + + transferViaManifest(); + // [END storagetransfer_manifest_request] +} + +main(...process.argv.slice(2)); + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); diff --git a/storagetransfer/nearline-request.js b/storagetransfer/nearline-request.js new file mode 100644 index 0000000000..63b2e9c49c --- /dev/null +++ b/storagetransfer/nearline-request.js @@ -0,0 +1,113 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +async function main( + projectId, + description, + gcsSourceBucket, + gcsSinkBucket, + startDate = new Date() +) { + // [START storagetransfer_transfer_to_nearline] + + // Imports the Google Cloud client library + const { + StorageTransferServiceClient, + } = require('@google-cloud/storage-transfer'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of the Google Cloud Platform Project that owns the job + // projectId = 'my-project-id' + + // A useful description for your transfer job + // description = 'My transfer job' + + // Google Cloud Storage source bucket name + // gcsSourceBucket = 'my-gcs-source-bucket' + + // Google Cloud Storage destination bucket name + // gcsSinkBucket = 'my-gcs-destination-bucket' + + // Date to start daily migration + // startDate = new Date() + + // Creates a client + const client = new StorageTransferServiceClient(); + + /** + * Create a daily migration from a GCS bucket to another GCS bucket for + * objects untouched for 30+ days. 
+ */ + async function createDailyNearline30DayMigration() { + // Runs the request and creates the job + const [transferJob] = await client.createTransferJob({ + transferJob: { + projectId, + description, + status: 'ENABLED', + schedule: { + scheduleStartDate: { + day: startDate.getDate(), + month: startDate.getMonth() + 1, + year: startDate.getFullYear(), + }, + }, + transferSpec: { + gcsDataSource: { + bucketName: gcsSourceBucket, + }, + gcsDataSink: { + bucketName: gcsSinkBucket, + }, + objectConditions: { + minTimeElapsedSinceLastModification: { + seconds: 2592000, // 30 days + }, + }, + transferOptions: { + deleteObjectsFromSourceAfterTransfer: true, + }, + }, + }, + }); + + console.log(`Created transferJob: ${transferJob.name}`); + } + + createDailyNearline30DayMigration(); + // [END storagetransfer_transfer_to_nearline] +} + +const [projectId, description, gcsSourceBucket, gcsSinkBucket, startDate] = [ + ...process.argv.slice(2), +]; + +main( + projectId, + description, + gcsSourceBucket, + gcsSinkBucket, + new Date(startDate) +); + +process.on('unhandledRejection', err => { + console.error(err); + process.exitCode = 1; +}); diff --git a/storagetransfer/package.json b/storagetransfer/package.json new file mode 100644 index 0000000000..a54b6b8358 --- /dev/null +++ b/storagetransfer/package.json @@ -0,0 +1,27 @@ +{ + "name": "nodejs-storage-transfer", + "private": true, + "license": "Apache-2.0", + "author": "Google LLC", + "engines": { + "node": ">=12.0.0" + }, + "files": [ + "*.js" + ], + "scripts": { + "test": "c8 mocha --parallel --timeout 600000 test/*.test.js" + }, + "dependencies": { + "@google-cloud/storage-transfer": "^2.2.1" + }, + "devDependencies": { + "@azure/storage-blob": "^12.12.0", + "@google-cloud/storage": "^6.0.0", + "aws-sdk": "^2.1073.0", + "c8": "^7.1.0", + "chai": "^4.2.0", + "mocha": "^9.0.0", + "uuid": "^9.0.0" + } +} diff --git a/storagetransfer/posix-download.js b/storagetransfer/posix-download.js new file mode 100644 index 
0000000000..3314992288 --- /dev/null +++ b/storagetransfer/posix-download.js @@ -0,0 +1,99 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +async function main( + projectId = 'my-project', + sinkAgentPoolName = '', + gcsSourceBucket = '', + gcsSourcePath = '', + rootDirectory = '' +) { + // [START storagetransfer_download_to_posix] + + // Imports the Google Cloud client library + const { + StorageTransferServiceClient, + } = require('@google-cloud/storage-transfer'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // Your project id + // const projectId = 'my-project' + + // The agent pool associated with the POSIX data sink. 
Defaults to the default agent + // const sinkAgentPoolName = 'projects/my-project/agentPools/transfer_service_default' + + // Google Cloud Storage source bucket name + // const gcsSourceBucket = 'my-gcs-source-bucket' + + // An optional path on the Google Cloud Storage bucket to download from + // const gcsSourcePath = 'foo/bar/' + + // The root directory path on the destination filesystem + // const rootDirectory = '/directory/to/transfer/sink' + + // Creates a client + const client = new StorageTransferServiceClient(); + + /** + * Creates a request to transfer from the local file system to the sink bucket + */ + async function downloadFromGCS() { + const createRequest = { + transferJob: { + projectId, + transferSpec: { + sinkAgentPoolName, + gcsDataSource: { + bucketName: gcsSourceBucket, + path: gcsSourcePath, + }, + posixDataSink: { + rootDirectory, + }, + }, + status: 'ENABLED', + }, + }; + + // Runs the request and creates the job + const [transferJob] = await client.createTransferJob(createRequest); + + const runRequest = { + jobName: transferJob.name, + projectId: projectId, + }; + + await client.runTransferJob(runRequest); + + console.log( + `Downloading from '${gcsSourceBucket}' (path: \`${gcsSourcePath}\`) to '${rootDirectory}' with name ${transferJob.name}` + ); + } + + downloadFromGCS(); + // [END storagetransfer_download_to_posix] +} + +main(...process.argv.slice(2)); + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); diff --git a/storagetransfer/posix-request.js b/storagetransfer/posix-request.js new file mode 100644 index 0000000000..e405c6f8c9 --- /dev/null +++ b/storagetransfer/posix-request.js @@ -0,0 +1,92 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +async function main( + projectId = 'my-project', + sourceAgentPoolName = '', + rootDirectory = '', + gcsSinkBucket = '' +) { + // [START storagetransfer_transfer_from_posix] + + // Imports the Google Cloud client library + const { + StorageTransferServiceClient, + } = require('@google-cloud/storage-transfer'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // Your project id + // const projectId = 'my-project' + + // The agent pool associated with the POSIX data source. Defaults to the default agent + // const sourceAgentPoolName = 'projects/my-project/agentPools/transfer_service_default' + + // The root directory path on the source filesystem + // const rootDirectory = '/directory/to/transfer/source' + + // The ID of the GCS bucket to transfer data to + // const gcsSinkBucket = 'my-sink-bucket' + + // Creates a client + const client = new StorageTransferServiceClient(); + + /** + * Creates a request to transfer from the local file system to the sink bucket + */ + async function transferDirectory() { + const createRequest = { + transferJob: { + projectId, + transferSpec: { + sourceAgentPoolName, + posixDataSource: { + rootDirectory, + }, + gcsDataSink: {bucketName: gcsSinkBucket}, + }, + status: 'ENABLED', + }, + }; + + // Runs the request and creates the job + const [transferJob] = await client.createTransferJob(createRequest); + + const runRequest = { + jobName: transferJob.name, + projectId: projectId, + }; + + await client.runTransferJob(runRequest); + + console.log( + 
`Created and ran a transfer job from '${rootDirectory}' to '${gcsSinkBucket}' with name ${transferJob.name}` + ); + } + + transferDirectory(); + // [END storagetransfer_transfer_from_posix] +} + +main(...process.argv.slice(2)); + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); diff --git a/storagetransfer/posix-to-posix-request.js b/storagetransfer/posix-to-posix-request.js new file mode 100644 index 0000000000..8228f20998 --- /dev/null +++ b/storagetransfer/posix-to-posix-request.js @@ -0,0 +1,106 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +async function main( + projectId = 'my-project', + sourceAgentPoolName = '', + sinkAgentPoolName = '', + rootDirectory = '', + destinationDirectory = '', + bucketName = '' +) { + // [START storagetransfer_transfer_posix_to_posix] + + // Imports the Google Cloud client library + const { + StorageTransferServiceClient, + } = require('@google-cloud/storage-transfer'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // Your project id + // const projectId = 'my-project' + + // The agent pool associated with the POSIX data source. Defaults to the default agent + // const sourceAgentPoolName = 'projects/my-project/agentPools/transfer_service_default' + + // The agent pool associated with the POSIX data sink. 
Defaults to the default agent + // const sinkAgentPoolName = 'projects/my-project/agentPools/transfer_service_default' + + // The root directory path on the source filesystem + // const rootDirectory = '/directory/to/transfer/source' + + // The root directory path on the sink filesystem + // const destinationDirectory = '/directory/to/transfer/sink' + + // The ID of the GCS bucket for intermediate storage + // const bucketName = 'my-intermediate-bucket' + + // Creates a client + const client = new StorageTransferServiceClient(); + + /** + * Creates a request to transfer from the local file system to the sink bucket + */ + async function transferDirectory() { + const createRequest = { + transferJob: { + projectId, + transferSpec: { + sourceAgentPoolName, + sinkAgentPoolName, + posixDataSource: { + rootDirectory, + }, + posixDataSink: { + rootDirectory: destinationDirectory, + }, + gcsIntermediateDataLocation: { + bucketName, + }, + }, + status: 'ENABLED', + }, + }; + + // Runs the request and creates the job + const [transferJob] = await client.createTransferJob(createRequest); + + const runRequest = { + jobName: transferJob.name, + projectId: projectId, + }; + + await client.runTransferJob(runRequest); + + console.log( + `Created and ran a transfer job from '${rootDirectory}' to '${destinationDirectory}' with name ${transferJob.name}` + ); + } + + transferDirectory(); + // [END storagetransfer_transfer_posix_to_posix] +} + +main(...process.argv.slice(2)); + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); diff --git a/storagetransfer/quickstart.js b/storagetransfer/quickstart.js new file mode 100644 index 0000000000..f8d689408a --- /dev/null +++ b/storagetransfer/quickstart.js @@ -0,0 +1,79 @@ +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +'use strict'; + +async function main(projectId = 'my-project', gcsSourceBucket, gcsSinkBucket) { + // [START storagetransfer_quickstart] + + // Imports the Google Cloud client library + const { + StorageTransferServiceClient, + } = require('@google-cloud/storage-transfer'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // Your project id + // const projectId = 'my-project' + + // The ID of the GCS bucket to transfer data from + // const gcsSourceBucket = 'my-source-bucket' + + // The ID of the GCS bucket to transfer data to + // const gcsSinkBucket = 'my-sink-bucket' + + // Creates a client + const client = new StorageTransferServiceClient(); + + /** + * Creates a one-time transfer job. 
+ */ + async function quickstart() { + // Creates a request to transfer from the source bucket to + // the sink bucket + const createRequest = { + transferJob: { + projectId: projectId, + transferSpec: { + gcsDataSource: {bucketName: gcsSourceBucket}, + gcsDataSink: {bucketName: gcsSinkBucket}, + }, + status: 'ENABLED', + }, + }; + + // Runs the request and creates the job + const [transferJob] = await client.createTransferJob(createRequest); + + const runRequest = { + jobName: transferJob.name, + projectId: projectId, + }; + await client.runTransferJob(runRequest); + + console.log( + `Created and ran a transfer job from ${gcsSourceBucket} to ${gcsSinkBucket} with name ${transferJob.name}` + ); + } + + quickstart(); + // [END storagetransfer_quickstart] +} + +main(...process.argv.slice(2)); + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); diff --git a/storagetransfer/test/aws-request.test.js b/storagetransfer/test/aws-request.test.js new file mode 100644 index 0000000000..745b2b1af6 --- /dev/null +++ b/storagetransfer/test/aws-request.test.js @@ -0,0 +1,62 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +'use strict'; + +const {assert} = require('chai'); +const {after, before, describe, it} = require('mocha'); + +const {BucketManager, TransferJobManager, runSample} = require('./utils'); + +describe('aws-request', () => { + const testBucketManager = new BucketManager(); + const testTransferJobManager = new TransferJobManager(); + + let projectId; + let description; + let awsSourceBucket; + let gcsSinkBucket; + + before(async () => { + testBucketManager.setupS3(); + + projectId = await testBucketManager.getProjectId(); + awsSourceBucket = await testBucketManager.generateS3Bucket(); + gcsSinkBucket = (await testBucketManager.generateGCSBucket()).name; + description = `My transfer job from '${awsSourceBucket}' -> '${gcsSinkBucket}'`; + }); + + after(async () => { + await testBucketManager.deleteBuckets(); + await testTransferJobManager.cleanUp(); + }); + + it('should create a transfer job from S3 to GCS', async () => { + const output = await runSample('aws-request', [ + projectId, + description, + awsSourceBucket, + gcsSinkBucket, + ]); + + assert.include(output, 'Created and ran a transfer job'); + + // If it ran successfully and a job was created, delete it to clean up + const [jobName] = output.match(/transferJobs.*/); + + testTransferJobManager.transferJobToCleanUp(jobName); + }); +}); diff --git a/storagetransfer/test/aws-s3-compatible-source-request.test.js b/storagetransfer/test/aws-s3-compatible-source-request.test.js new file mode 100644 index 0000000000..25a2fd3f0c --- /dev/null +++ b/storagetransfer/test/aws-s3-compatible-source-request.test.js @@ -0,0 +1,91 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const {assert} = require('chai'); +const {after, before, describe, it} = require('mocha'); + +const {BucketManager, TransferJobManager, runSample} = require('./utils'); + +describe('aws-s3-compatible-source-request', () => { + const testBucketManager = new BucketManager(); + const testTransferJobManager = new TransferJobManager(); + const {NetworkProtocol, RequestModel, AuthMethod} = + TransferJobManager.protos.storagetransfer.v1.S3CompatibleMetadata; + + let projectId; + let sourceAgentPoolName; + let sourceBucketName; + let sourcePath; + let gcsSinkBucket; + let gcsPath; + let region; + let endpoint; + let protocol; + let requestModel; + let authMethod; + + before(async () => { + projectId = await testTransferJobManager.client.getProjectId(); + + // Use default pool + sourceAgentPoolName = ''; + + const sourceBucket = await testBucketManager.generateGCSBucket(); + sourceBucketName = sourceBucket.name; + sourcePath = 'path/to/data/'; + + gcsSinkBucket = (await testBucketManager.generateGCSBucket()).name; + gcsPath = 'path/to/data/'; + + region = sourceBucket.getMetadata().location; + endpoint = sourceBucket.baseUrl; + protocol = NetworkProtocol.NETWORK_PROTOCOL_HTTPS; + requestModel = RequestModel.REQUEST_MODEL_VIRTUAL_HOSTED_STYLE; + authMethod = AuthMethod.AUTH_METHOD_AWS_SIGNATURE_V4; + }); + + after(async () => { + await testBucketManager.deleteBuckets(); + await testTransferJobManager.cleanUp(); + }); + + it('should create a transfer job from an AWS S3-compatible source to GCS', async () => { + const output = await 
runSample('aws-s3-compatible-source-request', [ + projectId, + sourceAgentPoolName, + sourceBucketName, + sourcePath, + gcsSinkBucket, + gcsPath, + region, + endpoint, + protocol, + requestModel, + authMethod, + ]); + + // If it ran successfully and a job was created, delete it to clean up + const [jobName] = output.match(/transferJobs.*/); + if (jobName) { + testTransferJobManager.transferJobToCleanUp(jobName); + } + + // Find at least 1 transfer operation from the transfer job in the output + assert.include(output, 'Created and ran a transfer job'); + }); +}); diff --git a/storagetransfer/test/azure-request.test.js b/storagetransfer/test/azure-request.test.js new file mode 100644 index 0000000000..1d9570dd86 --- /dev/null +++ b/storagetransfer/test/azure-request.test.js @@ -0,0 +1,83 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +'use strict'; + +const {assert} = require('chai'); +const {after, before, describe, it} = require('mocha'); + +const {BucketManager, TransferJobManager, runSample} = require('./utils'); + +describe('azure-request', () => { + const testBucketManager = new BucketManager(); + const testTransferJobManager = new TransferJobManager(); + + let projectId; + let description; + let azureStorageAccount; + let azureSourceContainer; + let gcsSinkBucket; + + before(async () => { + assert( + process.env.AZURE_CONNECTION_STRING, + 'environment variable AZURE_CONNECTION_STRING is required' + ); + + testBucketManager.setupBlobStorageFromConnectionString( + process.env.AZURE_CONNECTION_STRING + ); + + azureStorageAccount = + process.env.AZURE_STORAGE_ACCOUNT || + testBucketManager.blobStorage.accountName; + + projectId = await testBucketManager.getProjectId(); + azureSourceContainer = + await testBucketManager.generateBlobStorageContainer(); + gcsSinkBucket = (await testBucketManager.generateGCSBucket()).name; + description = `My transfer job from '${azureSourceContainer}' -> '${gcsSinkBucket}'`; + + if (!process.env.AZURE_SAS_TOKEN) { + // For security purposes we only want to pass this value via environment, not cli + process.env.AZURE_SAS_TOKEN = new URL( + testBucketManager.blobStorage.storageClientContext.url + ).search; + } + }); + + after(async () => { + await testBucketManager.deleteBuckets(); + await testTransferJobManager.cleanUp(); + }); + + it('should create a transfer job from Azure to GCS', async () => { + const output = await runSample('azure-request', [ + projectId, + description, + azureStorageAccount, + azureSourceContainer, + gcsSinkBucket, + ]); + + assert.include(output, 'Created and ran a transfer job'); + + // If it ran successfully and a job was created, delete it to clean up + const [jobName] = output.match(/transferJobs.*/); + + testTransferJobManager.transferJobToCleanUp(jobName); + }); +}); diff --git 
a/storagetransfer/test/check-latest-transfer-operation.test.js b/storagetransfer/test/check-latest-transfer-operation.test.js new file mode 100644 index 0000000000..c0e54c477f --- /dev/null +++ b/storagetransfer/test/check-latest-transfer-operation.test.js @@ -0,0 +1,59 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const {assert} = require('chai'); +const {after, before, describe, it} = require('mocha'); + +const {BucketManager, TransferJobManager, runSample} = require('./utils'); + +describe('check-latest-transfer-operation', () => { + const testBucketManager = new BucketManager(); + const testTransferJobManager = new TransferJobManager(); + + let projectId; + let transferJob; + + before(async () => { + projectId = await testBucketManager.getProjectId(); + const result = await testTransferJobManager.createTestTransferJob(); + + transferJob = result.transferJob; + }); + + after(async () => { + await testBucketManager.deleteBuckets(); + await testTransferJobManager.cleanUp(); + }); + + it('should check latest transfer operation', async () => { + await testTransferJobManager.client.runTransferJob({ + projectId, + jobName: transferJob.name, + }); + + const output = await runSample('check-latest-transfer-operation', [ + projectId, + transferJob.name, + ]); + + const formattedTransferJob = transferJob.name.replace('/', '-'); + + // Find the transfer operation from the transfer job in the output + 
assert.include(output, `transferOperations/${formattedTransferJob}-`); + }); +}); diff --git a/storagetransfer/test/get-transfer-job-with-retries.test.js b/storagetransfer/test/get-transfer-job-with-retries.test.js new file mode 100644 index 0000000000..51a011c157 --- /dev/null +++ b/storagetransfer/test/get-transfer-job-with-retries.test.js @@ -0,0 +1,51 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const {assert} = require('chai'); +const {after, before, describe, it} = require('mocha'); + +const {BucketManager, TransferJobManager, runSample} = require('./utils'); + +describe('get-transfer-job-with-retries', () => { + const testBucketManager = new BucketManager(); + const testTransferJobManager = new TransferJobManager(); + + let projectId; + let transferJob; + + before(async () => { + projectId = await testBucketManager.getProjectId(); + const result = await testTransferJobManager.createTestTransferJob(); + + transferJob = result.transferJob; + }); + + after(async () => { + await testBucketManager.deleteBuckets(); + await testTransferJobManager.cleanUp(); + }); + + it('should get a transfer job with retries', async () => { + const output = await runSample('get-transfer-job-with-retries', [ + projectId, + transferJob.name, + ]); + + assert.include(output, `Fetched transfer job: ${transferJob.name}`); + }); +}); diff --git a/storagetransfer/test/manifest-request.test.js 
b/storagetransfer/test/manifest-request.test.js new file mode 100644 index 0000000000..8f147fa362 --- /dev/null +++ b/storagetransfer/test/manifest-request.test.js @@ -0,0 +1,93 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const fs = require('fs').promises; +const os = require('os'); +const path = require('path'); + +const {assert} = require('chai'); +const {after, before, describe, it} = require('mocha'); + +const {BucketManager, TransferJobManager, runSample} = require('./utils'); + +describe('manifest-request', () => { + const testBucketManager = new BucketManager(); + const testTransferJobManager = new TransferJobManager(); + + let projectId; + let sourceAgentPoolName; + let rootDirectory; + let gcsSinkBucket; + let manifestLocation; + + let tempManifestObject; + let tempFile; + + before(async () => { + projectId = await testTransferJobManager.client.getProjectId(); + + // Use default pool + sourceAgentPoolName = ''; + + rootDirectory = await fs.mkdtemp( + path.join(os.tmpdir(), 'sts-manifest-request-test-src-') + ); + + const bucket = await testBucketManager.generateGCSBucket(); + + gcsSinkBucket = bucket.name; + + tempFile = path.join(rootDirectory, 'text.txt'); + await fs.writeFile(tempFile, 'test data'); + + // Double-quote to escape double-quotes in CSV text + const csvContent = `"${tempFile.replace(/"/g, '""')}"`; + + tempManifestObject = bucket.file('manifest.csv'); + await 
tempManifestObject.save(csvContent); + + manifestLocation = `gs://${bucket.name}/${tempManifestObject.name}`; + }); + + after(async () => { + await tempManifestObject.delete(); + await testBucketManager.deleteBuckets(); + await testTransferJobManager.cleanUp(); + await fs.unlink(tempFile); + await fs.rmdir(rootDirectory); + }); + + it('should create a transfer job using a manifest from POSIX to GCS', async () => { + const output = await runSample('manifest-request', [ + projectId, + sourceAgentPoolName, + rootDirectory, + gcsSinkBucket, + manifestLocation, + ]); + + // If it ran successfully and a job was created, delete it to clean up + const [jobName] = output.match(/transferJobs.*/); + if (jobName) { + testTransferJobManager.transferJobToCleanUp(jobName); + } + + // Find at least 1 transfer operation from the transfer job in the output + assert.include(output, 'Created and ran a transfer job'); + }); +}); diff --git a/storagetransfer/test/nearline-request.test.js b/storagetransfer/test/nearline-request.test.js new file mode 100644 index 0000000000..f730ced7c1 --- /dev/null +++ b/storagetransfer/test/nearline-request.test.js @@ -0,0 +1,61 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +'use strict'; + +const {assert} = require('chai'); +const {after, before, describe, it} = require('mocha'); + +const {BucketManager, TransferJobManager, runSample} = require('./utils'); + +describe('nearline-request', () => { + const testBucketManager = new BucketManager(); + const testTransferJobManager = new TransferJobManager(); + + let projectId; + let description; + let gcsSourceBucket; + let gcsSinkBucket; + + before(async () => { + projectId = await testBucketManager.getProjectId(); + gcsSourceBucket = (await testBucketManager.generateGCSBucket()).name; + gcsSinkBucket = (await testBucketManager.generateGCSBucket()).name; + description = `My transfer job from '${gcsSourceBucket}' -> '${gcsSinkBucket}'`; + }); + + after(async () => { + await testBucketManager.deleteBuckets(); + await testTransferJobManager.cleanUp(); + }); + + it('should create a daily transfer job from S3 to GCS', async () => { + const output = await runSample('nearline-request', [ + projectId, + description, + gcsSourceBucket, + gcsSinkBucket, + new Date().toISOString(), + ]); + + assert.include(output, 'Created transferJob: transferJobs/'); + + // If it ran successfully and a job was created, delete it to clean up + const [jobName] = output.match(/transferJobs.*/); + + testTransferJobManager.transferJobToCleanUp(jobName); + }); +}); diff --git a/storagetransfer/test/posix-download.test.js b/storagetransfer/test/posix-download.test.js new file mode 100644 index 0000000000..bfe904f20b --- /dev/null +++ b/storagetransfer/test/posix-download.test.js @@ -0,0 +1,89 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const fs = require('fs').promises; +const os = require('os'); +const path = require('path'); + +const {assert} = require('chai'); +const {after, before, describe, it} = require('mocha'); + +const {BucketManager, TransferJobManager, runSample} = require('./utils'); + +describe('posix-download', () => { + const testBucketManager = new BucketManager(); + const testTransferJobManager = new TransferJobManager(); + + let projectId; + let sinkAgentPoolName; + let gcsSourceBucket; + let gcsSourcePath; + let rootDirectory; + + let tempObject; + + before(async () => { + projectId = await testTransferJobManager.client.getProjectId(); + + // Use default pool + sinkAgentPoolName = ''; + + const bucket = await testBucketManager.generateGCSBucket(); + gcsSourceBucket = bucket.name; + + rootDirectory = await fs.mkdtemp( + path.join(os.tmpdir(), 'sts-posix-download-test-sink-') + ); + + // API requires path to end with '/' + gcsSourcePath = rootDirectory + path.posix.sep; + + tempObject = bucket.file(path.join(rootDirectory, 'text.txt')); + + await tempObject.save('test data'); + }); + + after(async () => { + await tempObject.delete(); + await testBucketManager.deleteBuckets(); + await testTransferJobManager.cleanUp(); + await fs.rmdir(rootDirectory); + }); + + it('should create a transfer job from GCS to POSIX', async () => { + const output = await runSample('posix-download', [ + projectId, + sinkAgentPoolName, + gcsSourceBucket, + gcsSourcePath, + rootDirectory, + ]); + + // If it ran successfully and a job was created, delete it to clean 
up + const [jobName] = output.match(/transferJobs.*/); + if (jobName) { + testTransferJobManager.transferJobToCleanUp(jobName); + } + + // Find at least 1 transfer operation from the transfer job in the output + assert.include( + output, + `Downloading from '${gcsSourceBucket}' (path: \`${gcsSourcePath}\`) to '${rootDirectory}'` + ); + }); +}); diff --git a/storagetransfer/test/posix-request.test.js b/storagetransfer/test/posix-request.test.js new file mode 100644 index 0000000000..d7e0d1181d --- /dev/null +++ b/storagetransfer/test/posix-request.test.js @@ -0,0 +1,79 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +'use strict'; + +const fs = require('fs').promises; +const os = require('os'); +const path = require('path'); + +const {assert} = require('chai'); +const {after, before, describe, it} = require('mocha'); + +const {BucketManager, TransferJobManager, runSample} = require('./utils'); + +describe('posix-request', () => { + const testBucketManager = new BucketManager(); + const testTransferJobManager = new TransferJobManager(); + + let projectId; + let sourceAgentPoolName; + let rootDirectory; + let gcsSinkBucket; + + let tempFile; + + before(async () => { + projectId = await testTransferJobManager.client.getProjectId(); + + // Use default pool + sourceAgentPoolName = ''; + + rootDirectory = await fs.mkdtemp( + path.join(os.tmpdir(), 'sts-posix-request-test-src-') + ); + + gcsSinkBucket = (await testBucketManager.generateGCSBucket()).name; + + tempFile = path.join(rootDirectory, 'text.txt'); + await fs.writeFile(tempFile, 'test data'); + }); + + after(async () => { + await testBucketManager.deleteBuckets(); + await testTransferJobManager.cleanUp(); + await fs.unlink(tempFile); + await fs.rmdir(rootDirectory); + }); + + it('should create a transfer job from POSIX to GCS', async () => { + const output = await runSample('posix-request', [ + projectId, + sourceAgentPoolName, + rootDirectory, + gcsSinkBucket, + ]); + + // If it ran successfully and a job was created, delete it to clean up + const [jobName] = output.match(/transferJobs.*/); + if (jobName) { + testTransferJobManager.transferJobToCleanUp(jobName); + } + + // Find at least 1 transfer operation from the transfer job in the output + assert.include(output, 'Created and ran a transfer job'); + }); +}); diff --git a/storagetransfer/test/posix-to-posix-request.test.js b/storagetransfer/test/posix-to-posix-request.test.js new file mode 100644 index 0000000000..da7fd9ad69 --- /dev/null +++ b/storagetransfer/test/posix-to-posix-request.test.js @@ -0,0 +1,89 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed 
under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const fs = require('fs').promises; +const os = require('os'); +const path = require('path'); + +const {assert} = require('chai'); +const {after, before, describe, it} = require('mocha'); + +const {BucketManager, TransferJobManager, runSample} = require('./utils'); + +describe('posix-to-posix-request', () => { + const testBucketManager = new BucketManager(); + const testTransferJobManager = new TransferJobManager(); + + let projectId; + let sourceAgentPoolName; + let sinkAgentPoolName; + let rootDirectory; + let destinationDirectory; + let bucketName; + + let tempFile; + + before(async () => { + projectId = await testTransferJobManager.client.getProjectId(); + + // Use default pool + sourceAgentPoolName = ''; + sinkAgentPoolName = ''; + + rootDirectory = await fs.mkdtemp( + path.join(os.tmpdir(), 'sts-posix-to-posix-request-test-src-') + ); + + destinationDirectory = await fs.mkdtemp( + path.join(os.tmpdir(), 'sts-posix-to-posix-request-test-sink-') + ); + + bucketName = (await testBucketManager.generateGCSBucket()).name; + + tempFile = path.join(rootDirectory, 'text.txt'); + await fs.writeFile(tempFile, 'test data'); + }); + + after(async () => { + await testBucketManager.deleteBuckets(); + await testTransferJobManager.cleanUp(); + + await fs.unlink(tempFile); + await fs.rmdir(rootDirectory); + }); + + it('should create a transfer job from POSIX to POSIX', async () => { + const output = await 
runSample('posix-to-posix-request', [ + projectId, + sourceAgentPoolName, + sinkAgentPoolName, + rootDirectory, + destinationDirectory, + bucketName, + ]); + + // If it ran successfully and a job was created, delete it to clean up + const [jobName] = output.match(/transferJobs.*/); + if (jobName) { + testTransferJobManager.transferJobToCleanUp(jobName); + } + + // Find at least 1 transfer operation from the transfer job in the output + assert.include(output, 'Created and ran a transfer job'); + }); +}); diff --git a/storagetransfer/test/quickstart.test.js b/storagetransfer/test/quickstart.test.js new file mode 100644 index 0000000000..9c8a5a1b27 --- /dev/null +++ b/storagetransfer/test/quickstart.test.js @@ -0,0 +1,59 @@ +// Copyright 2021 Google LLC + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +'use strict'; + +const {assert} = require('chai'); +const {after, before, describe, it} = require('mocha'); + +const {BucketManager, TransferJobManager, runSample} = require('./utils'); + +describe('quickstart', () => { + const testBucketManager = new BucketManager(); + const testTransferJobManager = new TransferJobManager(); + + let projectId; + let sourceBucket; + let sinkBucket; + + before(async () => { + projectId = await testBucketManager.getProjectId(); + sourceBucket = (await testBucketManager.generateGCSBucket()).name; + sinkBucket = (await testBucketManager.generateGCSBucket()).name; + }); + + after(async () => { + await testBucketManager.deleteBuckets(); + await testTransferJobManager.cleanUp(); + }); + + it('should run quickstart', async () => { + const output = await runSample('quickstart', [ + projectId, + sourceBucket, + sinkBucket, + ]); + + assert.include(output, 'transferJobs/'); + + // If it ran successfully and a job was created, delete it to clean up + const [jobName] = output.match(/transferJobs.*/); + + testTransferJobManager.transferJobToCleanUp(jobName); + }); +}); diff --git a/storagetransfer/test/transfer-check.test.js b/storagetransfer/test/transfer-check.test.js new file mode 100644 index 0000000000..3f59ef54f0 --- /dev/null +++ b/storagetransfer/test/transfer-check.test.js @@ -0,0 +1,59 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +'use strict'; + +const {assert} = require('chai'); +const {after, before, describe, it} = require('mocha'); + +const {BucketManager, TransferJobManager, runSample} = require('./utils'); + +describe('transfer-check', () => { + const testBucketManager = new BucketManager(); + const testTransferJobManager = new TransferJobManager(); + + let projectId; + let transferJob; + + before(async () => { + projectId = await testBucketManager.getProjectId(); + const result = await testTransferJobManager.createTestTransferJob(); + + transferJob = result.transferJob; + }); + + after(async () => { + await testBucketManager.deleteBuckets(); + await testTransferJobManager.cleanUp(); + }); + + it('should list operations for a transfer job', async () => { + await testTransferJobManager.client.runTransferJob({ + projectId, + jobName: transferJob.name, + }); + + const output = await runSample('transfer-check', [ + projectId, + transferJob.name, + ]); + + const formattedTransferJob = transferJob.name.replace('/', '-'); + + // Find at least 1 transfer operation from the transfer job in the output + assert.include(output, `transferOperations/${formattedTransferJob}-`); + }); +}); diff --git a/storagetransfer/test/utils/bucket.js b/storagetransfer/test/utils/bucket.js new file mode 100644 index 0000000000..d9c244d3aa --- /dev/null +++ b/storagetransfer/test/utils/bucket.js @@ -0,0 +1,256 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +'use strict'; + +// eslint-disable-next-line no-unused-vars +const {Storage, Bucket} = require('@google-cloud/storage'); +const { + StorageTransferServiceClient, +} = require('@google-cloud/storage-transfer'); +const AWS = require('aws-sdk'); +const AzureStorageBlob = require('@azure/storage-blob'); +const uuid = require('uuid'); + +class BucketManager { + constructor() { + this.client = new StorageTransferServiceClient(); + this.storage = new Storage(); + + /** + * The GCP Project ID. Cached after initial request + */ + this._cachedProjectId = ''; + + /** + * @type {Bucket[]} + */ + this.gcsBuckets = []; + /** + * @type {string[]} + */ + this.blobStorageContainers = []; + /** + * @type {string[]} + */ + this.s3Buckets = []; + } + + setupBlobStorageFromConnectionString(connectionString = '') { + this.blobStorage = + AzureStorageBlob.BlobServiceClient.fromConnectionString(connectionString); + } + + setupS3(options = {}) { + this.s3 = new AWS.S3({apiVersion: '2006-03-01', ...options}); + } + + async getProjectId() { + if (!this._cachedProjectId) { + this._cachedProjectId = await this.storage.getProjectId(); + } + + return this._cachedProjectId; + } + + /** + * Generates a unique name for GCS and S3 buckets. + * + * @returns {string} Name of bucket + */ + static generateBucketName() { + return `nodejs-sts-samples-${uuid.v4()}`; + } + + /** + * Configures permissions for STS to read/write to the bucket. + * + * @param {Bucket} bucket + * @param {number} waitForPropagation the time in milliseconds, if any, to wait + * for the policy to propigate (default 7 minutes). 
See for details: + * - https://cloud.google.com/iam/docs/faq#access_revoke + * - https://cloud.google.com/iam/docs/policies#structure + */ + async grantSTSPermissions(bucket, waitForPropagation = 7 * 60 * 1000) { + const [serviceAccount] = await this.client.getGoogleServiceAccount({ + projectId: await this.getProjectId(), + }); + + const member = `serviceAccount:${serviceAccount.accountEmail}`; + + const objectViewer = 'roles/storage.objectViewer'; + const bucketReader = 'roles/storage.legacyBucketReader'; + const bucketWriter = 'roles/storage.legacyBucketWriter'; + const members = [member]; + + const [policy] = await bucket.iam.getPolicy({requestedPolicyVersion: 3}); + + policy.bindings.push({ + role: objectViewer, + members: members, + }); + + policy.bindings.push({ + role: bucketReader, + members: members, + }); + + policy.bindings.push({ + role: bucketWriter, + members: members, + }); + + await bucket.iam.setPolicy(policy); + + if (waitForPropagation) { + const limit = Date.now() + waitForPropagation; + + let hasObjectViewer = false; + let hasBucketReader = false; + let hasBucketWriter = false; + + let attempts = 0; + + // Using do/while to ensure this runs at least once + do { + const [policy] = await bucket.iam.getPolicy({ + requestedPolicyVersion: 3, + }); + + for (const item of policy.bindings) { + if (item.members.includes(member)) { + switch (item.role) { + case objectViewer: + hasObjectViewer = true; + break; + case bucketReader: + hasBucketReader = true; + break; + case bucketWriter: + hasBucketWriter = true; + break; + } + } + } + + if (!hasObjectViewer || !hasBucketReader || !hasBucketWriter) { + await new Promise(resolve => setTimeout(resolve, 1000 * ++attempts)); + } else { + break; + } + } while (Date.now() < limit); + + if (!hasObjectViewer || !hasBucketReader || !hasBucketWriter) { + throw new RangeError( + `'${member}' is missing the required permissions for bucket '${bucket.name}'` + ); + } + } + } + + /** + * Generates a unique GCS bucket for 
testing. + * Configures STS read/write perms on the bucket. + * + * Is cached for easy clean-up via {#deleteBuckets}. + */ + async generateGCSBucket() { + const name = BucketManager.generateBucketName(); + const bucket = this.storage.bucket(name); + this.gcsBuckets.push(bucket); + + await bucket.create(); + await this.grantSTSPermissions(bucket); + + return bucket; + } + + /** + * Generates a unique Azure container for testing. + * + * Is cached for easy clean-up via {#deleteBuckets}. + */ + async generateBlobStorageContainer() { + const name = BucketManager.generateBucketName(); + + // Create a container + const containerClient = this.blobStorage.getContainerClient(name); + await containerClient.create(); + + this.blobStorageContainers.push(name); + + return name; + } + + /** + * Generates a unique S3 bucket for testing. + * + * Is cached for easy clean-up via {#deleteBuckets}. + */ + + async generateS3Bucket() { + const name = BucketManager.generateBucketName(); + + await new Promise((resolve, reject) => { + this.s3.createBucket({Bucket: name}, (error, data) => { + if (error) return reject(error); + + resolve(data); + }); + }); + + this.s3Buckets.push(name); + + return name; + } + + /** + * Deletes generated GCS & S3 test buckets. 
+ */ + async deleteBuckets() { + for (const bucket of this.gcsBuckets) { + try { + await bucket.delete(); + } catch (e) { + console.error(e); + } + } + + for (const container of this.blobStorageContainers) { + try { + await this.blobStorage.deleteContainer(container); + } catch (e) { + console.error(e); + } + } + + for (const bucket of this.s3Buckets) { + try { + await new Promise((resolve, reject) => { + this.s3.deleteBucket({Bucket: bucket}, (error, data) => { + if (error) return reject(error); + + resolve(data); + }); + }); + } catch (e) { + console.error(e); + } + } + } +} + +module.exports = {BucketManager}; diff --git a/storagetransfer/test/utils/index.js b/storagetransfer/test/utils/index.js new file mode 100644 index 0000000000..f388c16907 --- /dev/null +++ b/storagetransfer/test/utils/index.js @@ -0,0 +1,27 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +'use strict'; + +const bucket = require('./bucket'); +const sample = require('./sample'); +const transfer = require('./transfer'); + +module.exports = { + ...bucket, + ...sample, + ...transfer, +}; diff --git a/storagetransfer/test/utils/sample.js b/storagetransfer/test/utils/sample.js new file mode 100644 index 0000000000..5b891c91cb --- /dev/null +++ b/storagetransfer/test/utils/sample.js @@ -0,0 +1,38 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const {execFileSync} = require('child_process'); +const path = require('path'); + +/** + * Runs a sample and returns its output + * + * @param {string} sample name of sample to run. No need for '.js' suffix. + * @param {string[]} args the arguments to pass to the sample + * @returns {string} output of the command + */ +async function runSample(sample, args = []) { + return execFileSync('node', [`${sample}.js`, ...args], { + encoding: 'utf-8', + cwd: path.join(__dirname, '..', '..'), + }); +} + +module.exports = { + runSample, +}; diff --git a/storagetransfer/test/utils/transfer.js b/storagetransfer/test/utils/transfer.js new file mode 100644 index 0000000000..80e11fb451 --- /dev/null +++ b/storagetransfer/test/utils/transfer.js @@ -0,0 +1,118 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +const { + StorageTransferServiceClient, + protos, +} = require('@google-cloud/storage-transfer'); + +this.protos = protos.google; +require('@google-cloud/storage-transfer'); + +const {BucketManager} = require('./bucket'); + +class TransferJobManager { + constructor() { + this.bucketManager = new BucketManager(); + this.client = new StorageTransferServiceClient(); + + /** + * List of transferJobs to delete + * + * @type {string[]} + */ + this.transferJobsToCleanup = []; + } + + /** + * Clean up transfer jobs used for testing. + */ + async cleanUp() { + for (const jobName of this.transferJobsToCleanup) { + await this.deleteTransferJob(jobName); + } + + await this.bucketManager.deleteBuckets(); + } + + /** + * Creates a transfer job for testing, including test buckets. + * The job generated is not ran automatically. + * The source bucket and destination buckets do not contain objects. + * + * The job is automatically added for queue for easy clean-up via {#cleanUp}. 
+ */ + async createTestTransferJob() { + const sourceBucket = await this.bucketManager.generateGCSBucket(); + const destBucket = await this.bucketManager.generateGCSBucket(); + + const [transferJob] = await this.client.createTransferJob({ + transferJob: { + projectId: await this.client.getProjectId(), + transferSpec: { + gcsDataSource: { + bucketName: sourceBucket.name, + }, + gcsDataSink: { + bucketName: destBucket.name, + }, + }, + status: 'ENABLED', + }, + }); + + this.transferJobToCleanUp(transferJob.name); + + return { + sourceBucket, + destBucket, + transferJob, + }; + } + + /** + * Deletes a STS transfer job. + * + * @param {string} jobName the name of the STS Job to delete + */ + async deleteTransferJob(jobName) { + await this.client.updateTransferJob({ + projectId: await this.client.getProjectId(), + jobName, + transferJob: { + name: jobName, + status: 'DELETED', + }, + }); + } + + /** + * Adds Transfer Job to queue for easy clean-up via {#cleanUp}. + * + * @param {string} jobName the name of the STS Job to queue + */ + transferJobToCleanUp(jobName) { + this.transferJobsToCleanup.push(jobName); + } + + static get protos() { + return protos.google; + } +} + +module.exports = {TransferJobManager}; diff --git a/storagetransfer/transfer-check.js b/storagetransfer/transfer-check.js new file mode 100644 index 0000000000..6e68ff64d7 --- /dev/null +++ b/storagetransfer/transfer-check.js @@ -0,0 +1,74 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +async function main(projectId, jobName) { + // [START storagetransfer_transfer_check] + + // Imports the Google Cloud client library + const { + StorageTransferServiceClient, + protos, + } = require('@google-cloud/storage-transfer'); + + // Proto for TransferOperation + const TransferOperation = protos.google.storagetransfer.v1.TransferOperation; + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // The ID of the Google Cloud Platform Project that owns the job + // projectId = 'my-project-id' + + // Storage Transfer Service job name + // jobName = 'transferJobs/1234567890' + + // Creates a client + const client = new StorageTransferServiceClient(); + + /** + * Lists operations for a transfer job. + */ + async function checkLatestTransferOperation() { + const filter = JSON.stringify({ + project_id: projectId, + job_names: [jobName], + }); + + const [operations] = await client.operationsClient.listOperations({ + name: 'transferOperations', + filter, + }); + + console.log(`Transfer operations for ${jobName}:`); + for (const {metadata} of operations) { + const transferOperation = TransferOperation.decode(metadata.value); + + console.dir(transferOperation); + } + } + + checkLatestTransferOperation(); + // [END storagetransfer_transfer_check] +} + +main(...process.argv.slice(2)); + +process.on('unhandledRejection', err => { + console.error(err); + process.exitCode = 1; +});