Skip to content

Commit

Permalink
samples: Add AWS S3-compatible Sample (#118)
Browse files Browse the repository at this point in the history
* samples: Add AWS S3-compatible sample

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* docs: description

* docs: typo

* docs: another typo

* docs: typos

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
  • Loading branch information
danielbankhead and gcf-owl-bot[bot] authored Oct 12, 2022
1 parent 09a575a commit ace1413
Show file tree
Hide file tree
Showing 3 changed files with 231 additions and 0 deletions.
132 changes: 132 additions & 0 deletions storagetransfer/aws-s3-compatible-source-request.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,132 @@
/**
* Copyright 2022 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

'use strict';

const {protos} = require('@google-cloud/storage-transfer');
const {AuthMethod, NetworkProtocol, RequestModel} =
protos.google.storagetransfer.v1.S3CompatibleMetadata;

/**
 * Creates and runs a Storage Transfer Service job that copies data from an
 * AWS S3-compatible source into a Google Cloud Storage bucket.
 *
 * @param {string} projectId Google Cloud project that owns the transfer job.
 * @param {string} sourceAgentPoolName Agent pool for the S3-compatible source.
 * @param {string} sourceBucketName S3-compatible bucket to transfer data from.
 * @param {string} sourcePath Object prefix to transfer data from.
 * @param {string} gcsSinkBucket GCS bucket to transfer data to.
 * @param {string} gcsPath Object prefix to transfer data to.
 * @param {string} region S3 region of the source bucket.
 * @param {string} endpoint S3-compatible endpoint.
 * @param {number} protocol S3-compatible network protocol (NetworkProtocol enum).
 * @param {number} requestModel S3-compatible request model (RequestModel enum).
 * @param {number} authMethod S3-compatible auth method (AuthMethod enum).
 */
async function main(
  projectId = 'my-project',
  sourceAgentPoolName = 'projects/my-project/agentPools/transfer_service_default',
  sourceBucketName = 'my-bucket-name',
  sourcePath = 'path/to/data/',
  gcsSinkBucket = 'my-sink-bucket',
  gcsPath = 'path/to/data/',
  region = 'us-east-1',
  endpoint = 'us-east-1.example.com',
  protocol = NetworkProtocol.NETWORK_PROTOCOL_HTTPS,
  requestModel = RequestModel.REQUEST_MODEL_VIRTUAL_HOSTED_STYLE,
  authMethod = AuthMethod.AUTH_METHOD_AWS_SIGNATURE_V4
) {
  // [START storagetransfer_transfer_from_s3_compatible_source]

  // Imports the Google Cloud client library
  const storageTransfer = require('@google-cloud/storage-transfer');

  /**
   * TODO(developer): Uncomment the following lines before running the sample.
   */
  // Useful enums for AWS S3-Compatible Transfers
  // const {AuthMethod, NetworkProtocol, RequestModel} = storageTransfer.protos.google.storagetransfer.v1.S3CompatibleMetadata;

  // Your project id
  // const projectId = 'my-project';

  // The agent pool associated with the S3-compatible data source. Defaults to the default agent
  // const sourceAgentPoolName = 'projects/my-project/agentPools/transfer_service_default';

  // The S3-compatible bucket name to transfer data from
  // const sourceBucketName = "my-bucket-name";

  // The S3-compatible path (object prefix) to transfer data from
  // const sourcePath = "path/to/data/";

  // The ID of the GCS bucket to transfer data to
  // const gcsSinkBucket = "my-sink-bucket";

  // The GCS path (object prefix) to transfer data to
  // const gcsPath = "path/to/data/";

  // The S3 region of the source bucket
  // const region = 'us-east-1';

  // The S3-compatible endpoint
  // const endpoint = "us-east-1.example.com";

  // The S3-compatible network protocol
  // const protocol = NetworkProtocol.NETWORK_PROTOCOL_HTTPS;

  // The S3-compatible request model
  // const requestModel = RequestModel.REQUEST_MODEL_VIRTUAL_HOSTED_STYLE;

  // The S3-compatible auth method
  // const authMethod = AuthMethod.AUTH_METHOD_AWS_SIGNATURE_V4;

  // Creates a client
  const client = new storageTransfer.StorageTransferServiceClient();

  /**
   * Creates a transfer from an AWS S3-compatible source to GCS
   */
  async function transferFromS3CompatibleSource() {
    // Runs the request and creates the job
    const [transferJob] = await client.createTransferJob({
      transferJob: {
        projectId,
        transferSpec: {
          sourceAgentPoolName,
          awsS3CompatibleDataSource: {
            region,
            s3Metadata: {
              authMethod,
              protocol,
              requestModel,
            },
            endpoint,
            bucketName: sourceBucketName,
            path: sourcePath,
          },
          gcsDataSink: {
            bucketName: gcsSinkBucket,
            path: gcsPath,
          },
        },
        status: 'ENABLED',
      },
    });

    // Immediately kick off a run of the job created above.
    await client.runTransferJob({
      jobName: transferJob.name,
      projectId,
    });

    console.log(
      `Created and ran a transfer job from '${sourceBucketName}' to '${gcsSinkBucket}' with name ${transferJob.name}`
    );
  }

  // Await so that main()'s promise settles only after the job has been
  // created and started; otherwise rejections escape as unhandledRejection.
  await transferFromS3CompatibleSource();
  // [END storagetransfer_transfer_from_s3_compatible_source]
}

// Run the sample with CLI arguments (all optional; see main's defaults).
main(...process.argv.slice(2));

// Samples intentionally let main()'s promise float; report any rejection
// and fail the process with a non-zero exit code.
process.on('unhandledRejection', err => {
console.error(err.message);
process.exitCode = 1;
});
91 changes: 91 additions & 0 deletions storagetransfer/test/aws-s3-compatible-source-request.test.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,91 @@
/**
* Copyright 2022 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

'use strict';

const {assert} = require('chai');
const {after, before, describe, it} = require('mocha');

const {BucketManager, TransferJobManager, runSample} = require('./utils');

/**
 * System test for the aws-s3-compatible-source-request sample: provisions two
 * GCS buckets (one standing in for the S3-compatible source), runs the sample
 * as a subprocess, and verifies a transfer job was created.
 */
describe('aws-s3-compatible-source-request', () => {
  const testBucketManager = new BucketManager();
  const testTransferJobManager = new TransferJobManager();
  const {NetworkProtocol, RequestModel, AuthMethod} =
    TransferJobManager.protos.storagetransfer.v1.S3CompatibleMetadata;

  let projectId;
  let sourceAgentPoolName;
  let sourceBucketName;
  let sourcePath;
  let gcsSinkBucket;
  let gcsPath;
  let region;
  let endpoint;
  let protocol;
  let requestModel;
  let authMethod;

  before(async () => {
    projectId = await testTransferJobManager.client.getProjectId();

    // Use default pool
    sourceAgentPoolName = '';

    const sourceBucket = await testBucketManager.generateGCSBucket();
    sourceBucketName = sourceBucket.name;
    sourcePath = 'path/to/data/';

    gcsSinkBucket = (await testBucketManager.generateGCSBucket()).name;
    gcsPath = 'path/to/data/';

    // NOTE(review): Bucket#getMetadata() in the GCS client normally returns a
    // Promise of [metadata, ...], so `.location` here may be undefined —
    // confirm against BucketManager's return type. TODO: verify/await.
    region = sourceBucket.getMetadata().location;
    endpoint = sourceBucket.baseUrl;
    protocol = NetworkProtocol.NETWORK_PROTOCOL_HTTPS;
    requestModel = RequestModel.REQUEST_MODEL_VIRTUAL_HOSTED_STYLE;
    authMethod = AuthMethod.AUTH_METHOD_AWS_SIGNATURE_V4;
  });

  after(async () => {
    await testBucketManager.deleteBuckets();
    await testTransferJobManager.cleanUp();
  });

  it('should create a transfer job from an AWS S3-compatible source to GCS', async () => {
    const output = await runSample('aws-s3-compatible-source-request', [
      projectId,
      sourceAgentPoolName,
      sourceBucketName,
      sourcePath,
      gcsSinkBucket,
      gcsPath,
      region,
      endpoint,
      protocol,
      requestModel,
      authMethod,
    ]);

    // If it ran successfully and a job was created, delete it to clean up.
    // `match` returns null when no job name was printed; guard with `?? []`
    // so the assertion below (not a TypeError) reports the failure.
    const [jobName] = output.match(/transferJobs.*/) ?? [];
    if (jobName) {
      testTransferJobManager.transferJobToCleanUp(jobName);
    }

    // Verify the sample reported a successfully created-and-run job.
    assert.include(output, 'Created and ran a transfer job');
  });
});
8 changes: 8 additions & 0 deletions storagetransfer/test/utils/transfer.js
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,12 @@

const {
StorageTransferServiceClient,
protos,
} = require('@google-cloud/storage-transfer');

this.protos = protos.google;
require('@google-cloud/storage-transfer');

const {BucketManager} = require('./bucket');

class TransferJobManager {
Expand Down Expand Up @@ -105,6 +109,10 @@ class TransferJobManager {
/**
 * Registers a transfer job name to be deleted during cleanUp().
 * @param {string} jobName Fully-qualified job name (e.g. `transferJobs/...`).
 */
transferJobToCleanUp(jobName) {
this.transferJobsToCleanup.push(jobName);
}

/**
 * Exposes the `google` protobuf namespace from @google-cloud/storage-transfer
 * so tests can reference enums (e.g. S3CompatibleMetadata) without importing
 * the client library themselves.
 * @returns {object} The `protos.google` namespace object.
 */
static get protos() {
return protos.google;
}
}

module.exports = {TransferJobManager};

0 comments on commit ace1413

Please sign in to comment.