Skip to content

Commit

Permalink
remove bucket env var req. & stop deleting all buckets
Browse files Browse the repository at this point in the history
  • Loading branch information
stephenplusplus committed Nov 3, 2014
1 parent c10073f commit a049907
Show file tree
Hide file tree
Showing 3 changed files with 80 additions and 67 deletions.
1 change: 0 additions & 1 deletion CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@ $ npm test
To run the regression tests, first create and configure a project in the Google Developers Console following the [instructions on how to run gcloud-node][elsewhere]. After that, set the following environment variables:

- **GCLOUD_TESTS_PROJECT_ID**: Developers Console project's ID (e.g. bamboo-shift-455)
- **GCLOUD_TESTS_BUCKET_NAME**: The name of the bucket to use for the Cloud Storage API tests
- **GCLOUD_TESTS_KEY**: The path to the JSON key file.

Install the [gcloud command-line tool][gcloudcli] on your machine and use it to create the indexes used in the datastore regression tests from the index definitions in `regression/data/index.yaml`:
Expand Down
9 changes: 3 additions & 6 deletions regression/env.js
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,7 @@

'use strict';

if (!process.env.GCLOUD_TESTS_PROJECT_ID &&
!process.env.GCLOUD_TESTS_BUCKET_NAME &&
!process.env.GCLOUD_TESTS_KEY) {
if (!process.env.GCLOUD_TESTS_PROJECT_ID && !process.env.GCLOUD_TESTS_KEY) {
var error = [
'To run the regression tests, you need to set some environment variables.',
'Please check the Contributing guide for instructions.'
Expand All @@ -27,7 +25,6 @@ if (!process.env.GCLOUD_TESTS_PROJECT_ID &&
}

module.exports = {
projectId: process.env.GCLOUD_TESTS_PROJECT_ID,
bucketName: process.env.GCLOUD_TESTS_BUCKET_NAME,
keyFilename: process.env.GCLOUD_TESTS_KEY
keyFilename: process.env.GCLOUD_TESTS_KEY,
projectId: process.env.GCLOUD_TESTS_PROJECT_ID
};
137 changes: 77 additions & 60 deletions regression/storage.js
Original file line number Diff line number Diff line change
Expand Up @@ -24,10 +24,13 @@ var crypto = require('crypto');
var fs = require('fs');
var request = require('request');
var tmp = require('tmp');
var uuid = require('node-uuid');

var env = require('./env.js');
var storage = require('../lib/storage')(env);

var BUCKET_NAME = generateBucketName();

var files = {
logo: {
path: 'regression/data/CloudPlatform_128px_Retina.png'
Expand All @@ -37,30 +40,6 @@ var files = {
}
};

// Compute the base64-encoded MD5 digest of the file at `obj[file].path`,
// store it on `obj[file].hash`, then invoke `done`.
function setHash(obj, file, done) {
  var md5 = crypto.createHash('md5');
  var stream = fs.createReadStream(obj[file].path);
  stream.on('data', function(chunk) {
    md5.update(chunk);
  });
  stream.on('end', function() {
    obj[file].hash = md5.digest('base64');
    done();
  });
}

// Remove every bucket in the project, emptying each bucket's files first.
// `callback` receives any error from listing buckets, or the outcome of
// deleting them all.
function deleteBucketsAndFiles(callback) {
  storage.getBuckets(function(err, buckets) {
    if (err) {
      return callback(err);
    }
    function emptyAndDelete(bucket, next) {
      deleteFiles(bucket, function() {
        bucket.delete(next);
      });
    }
    async.map(buckets, emptyAndDelete, callback);
  });
}

function deleteFiles(bucket, callback) {
bucket.getFiles(function(err, files) {
if (err) {
Expand All @@ -73,55 +52,93 @@ function deleteFiles(bucket, callback) {
});
}

// Build a unique, throwaway bucket name so concurrent test runs never
// collide on the same bucket.
function generateBucketName() {
  return ['gcloud-test-bucket-temp', uuid.v1()].join('-');
}

/**
 * Calculate the base64-encoded MD5 hash of the file at `obj[file].path`
 * and assign it to `obj[file].hash`.
 *
 * @param {object} obj - Map of file descriptors (e.g. the `files` object).
 * @param {string} file - Key into `obj` naming the descriptor to hash.
 * @param {function} done - Called with no arguments on success, or with the
 *     stream error if the file cannot be read.
 */
function setHash(obj, file, done) {
  var hash = crypto.createHash('md5');
  fs.createReadStream(obj[file].path)
    .on('data', hash.update.bind(hash))
    // Without this handler a read failure (e.g. a missing fixture file)
    // crashes the process with an unhandled 'error' event instead of
    // failing the test; surface it to the callback instead.
    .on('error', done)
    .on('end', function() {
      obj[file].hash = hash.digest('base64');
      done();
    });
}

describe('storage', function() {
var bucket;

before(function(done) {
deleteBucketsAndFiles(function() {
storage.createBucket('new' + Date.now(), function(err, newBucket) {
if (err) {
done(err);
return;
}
bucket = newBucket;
done();
});
storage.createBucket(BUCKET_NAME, function(err, newBucket) {
if (err) {
done(err);
return;
}
bucket = newBucket;
done();
});
});

after(deleteBucketsAndFiles);

describe('creating a bucket', function() {
it('should create a bucket', function(done) {
storage.createBucket('a-new-bucket', function(err, bucket) {
assert.ifError(err);
bucket.delete(done);
});
// Suite teardown: delete the test bucket's files first, then the bucket
// itself. Any error from emptying the bucket aborts the teardown and is
// reported to mocha via done(err).
after(function(done) {
deleteFiles(bucket, function(err) {
if (err) {
done(err);
return;
}
bucket.delete(done);
});
});

describe('getting buckets', function() {
var bucketsToCreate = [
generateBucketName(), generateBucketName(), generateBucketName()
];

before(function(done) {
async.map(bucketsToCreate, storage.createBucket.bind(storage), done);
});

after(function(done) {
async.parallel(bucketsToCreate.map(function(bucket) {
return function(done) {
storage.bucket(bucket).delete(done);
};
}), done);
});

it('should get buckets', function(done) {
var bucketsToCreate = [
'new' + Date.now(),
'newer' + Date.now(),
'newest' + Date.now()
];
async.map(
bucketsToCreate,
storage.createBucket.bind(storage),
function(err) {
assert.ifError(err);
storage.getBuckets(function(err, buckets) {
assert.equal(
buckets.filter(function(bucket) {
return bucketsToCreate.indexOf(bucket.name) > -1;
}).length,
bucketsToCreate.length
);
done();
});
storage.getBuckets(getBucketsHandler);

var createdBuckets = [];
var failedTests = 0;
var MAX_TRIES = 2;

/**
 * Page handler for storage.getBuckets. Collects the buckets this test
 * created; if they have not all shown up yet, re-fetches the listing up to
 * MAX_TRIES times before letting the assertion throw for real.
 *
 * @param {?error} err - API error from getBuckets, if any.
 * @param {array} buckets - Current page of bucket objects.
 * @param {?object} nextQuery - Query object for the next page, if any.
 */
function getBucketsHandler(err, buckets, nextQuery) {
  if (err) {
    // Surface the API failure directly instead of crashing on
    // `buckets.forEach` below and masking the real error.
    done(err);
    return;
  }

  buckets.forEach(function(bucket) {
    if (bucketsToCreate.indexOf(bucket.name) > -1) {
      createdBuckets.push(bucket);
    }
  });

  // Throws until every created bucket has been observed in the listing.
  function allCreated() {
    assert.equal(createdBuckets.length, bucketsToCreate.length);
  }

  try {
    allCreated();
    done();
  } catch(e) {
    failedTests++;

    if (failedTests <= MAX_TRIES) {
      // NOTE(review): nextQuery may be null on the last page — confirm
      // storage.getBuckets treats a null query as "start over".
      storage.getBuckets(nextQuery, getBucketsHandler);
    } else {
      // Out of retries: re-run the assertion so the test fails loudly.
      allCreated();
    }
  }
}
});
});

Expand Down

0 comments on commit a049907

Please sign in to comment.