diff --git a/system-test/spanner.ts b/system-test/spanner.ts index eab5f4592..489441041 100644 --- a/system-test/spanner.ts +++ b/system-test/spanner.ts
@@ -75,7 +75,7 @@ describe('Spanner', () => { : spanner.instance(generateName('instance')); const INSTANCE_CONFIG = { - config: 'regional-us-west1', + config: 'regional-us-west2', nodes: 1, labels: { [LABEL]: 'true',
@@ -88,6 +88,7 @@ describe('Spanner', () => { const RESOURCES_TO_CLEAN: Array<Database | Backup> = []; const DATABASE = instance.database(generateName('database'), {incStep: 1}); const TABLE_NAME = 'Singers'; + const PG_DATABASE = instance.database(generateName('pg-db'), {incStep: 1}); before(async () => { await deleteOldTestInstances();
@@ -100,7 +101,7 @@ describe('Spanner', () => { `Not creating temp instance, using + ${instance.formattedName_}...` ); } - const [, operation] = await DATABASE.create({ + const [, googleSqlOperation] = await DATABASE.create({ schema: ` CREATE TABLE ${TABLE_NAME} ( SingerId STRING(1024) NOT NULL,
@@ -108,8 +109,29 @@ describe('Spanner', () => { ) PRIMARY KEY(SingerId)`, gaxOptions: GAX_OPTIONS, }); - await operation.promise(); + await googleSqlOperation.promise(); RESOURCES_TO_CLEAN.push(DATABASE); + + if (!IS_EMULATOR_ENABLED) { + const [pg_database, postgreSqlOperation] = await PG_DATABASE.create({ + databaseDialect: Spanner.POSTGRESQL, + gaxOptions: GAX_OPTIONS, + }); + await postgreSqlOperation.promise(); + const schema = [ + ` + CREATE TABLE ${TABLE_NAME} ( + SingerId VARCHAR(1024) NOT NULL, + Name VARCHAR(1024), + PRIMARY KEY (SingerId) + );`, + ]; + const [postgreSqlOperationUpdateDDL] = await pg_database.updateSchema( + schema + ); + await postgreSqlOperationUpdateDDL.promise(); + RESOURCES_TO_CLEAN.push(PG_DATABASE); + } }); after(async () => {
@@ -147,98 +169,127 @@ describe('Spanner', () => { describe('types', () => { const TABLE_NAME = 'TypeCheck'; - const table = DATABASE.table(TABLE_NAME); + const googleSqlTable = DATABASE.table(TABLE_NAME); + const postgreSqlTable = PG_DATABASE.table(TABLE_NAME); - function insert(insertData, callback) { + function insert(insertData, dialect, callback) { const id = generateName('id'); insertData.Key = id; + let table = googleSqlTable; + let query: ExecuteSqlRequest = { + sql: 'SELECT * FROM `' + table.name + '` WHERE Key = @id', + params: { + id, + }, + }; + let database = DATABASE; + if (dialect === Spanner.POSTGRESQL) { + table = postgreSqlTable; + query = { + sql: 'SELECT * FROM ' + table.name + ' WHERE "Key" = $1', + params: { + p1: id, + }, + }; + database = PG_DATABASE; + } table.insert(insertData, (err, insertResp) => { if (err) { callback(err); return; } - DATABASE.run( - { - sql: 'SELECT * FROM `' + table.name + '` WHERE Key = @id', - params: { - id, - }, - }, - (err, rows, readResp) => { - if (err) { - callback(err); - return; - } - - callback(null, rows.shift(), insertResp, readResp); + database.run(query, (err, rows, readResp) => { + if (err) { + callback(err); + return; } - ); + + callback(null, rows.shift(), insertResp, readResp); + }); }); } - before(done => { + before(async () => { if (IS_EMULATOR_ENABLED) { - DATABASE.updateSchema( + const [googleSqlOperationUpdateDDL] = await DATABASE.updateSchema( ` - CREATE TABLE ${TABLE_NAME} ( - Key STRING(MAX) NOT NULL, - BytesValue BYTES(MAX), - BoolValue BOOL, - DateValue DATE, - FloatValue FLOAT64, - IntValue INT64, - NumericValue NUMERIC, - StringValue STRING(MAX), - TimestampValue TIMESTAMP, - BytesArray ARRAY<BYTES(MAX)>, - BoolArray ARRAY<BOOL>, - DateArray ARRAY<DATE>, - FloatArray ARRAY<FLOAT64>, - IntArray ARRAY<INT64>, - NumericArray ARRAY<NUMERIC>, - StringArray ARRAY<STRING(MAX)>, - TimestampArray ARRAY<TIMESTAMP>, - CommitTimestamp TIMESTAMP OPTIONS (allow_commit_timestamp=true) - ) PRIMARY KEY (Key) - `, - execAfterOperationComplete(done) + CREATE TABLE ${TABLE_NAME} + ( + Key STRING( MAX) NOT NULL, + BytesValue BYTES( MAX), + BoolValue BOOL, + DateValue DATE, + FloatValue FLOAT64, + IntValue INT64, + NumericValue NUMERIC, + StringValue STRING( MAX), + TimestampValue TIMESTAMP, + BytesArray ARRAY<BYTES(MAX)>, + BoolArray ARRAY<BOOL>, + DateArray ARRAY< DATE >, + FloatArray ARRAY<FLOAT64>, + IntArray ARRAY<INT64>, + NumericArray ARRAY< NUMERIC >, + StringArray ARRAY<STRING(MAX)>, + TimestampArray ARRAY< TIMESTAMP >, + CommitTimestamp TIMESTAMP OPTIONS (allow_commit_timestamp= true) + ) PRIMARY KEY (Key) + ` ); + await googleSqlOperationUpdateDDL.promise(); } else { - DATABASE.updateSchema( + const [googleSqlOperationUpdateDDL] = await DATABASE.updateSchema( ` - CREATE TABLE ${TABLE_NAME} ( - Key STRING(MAX) NOT NULL, - BytesValue BYTES(MAX), - BoolValue BOOL, - DateValue DATE, - FloatValue FLOAT64, - JsonValue JSON, - IntValue INT64, - NumericValue NUMERIC, - StringValue STRING(MAX), - TimestampValue TIMESTAMP, - BytesArray ARRAY<BYTES(MAX)>, - BoolArray ARRAY<BOOL>, - DateArray ARRAY<DATE>, - FloatArray ARRAY<FLOAT64>, - JsonArray ARRAY<JSON>, - IntArray ARRAY<INT64>, - NumericArray ARRAY<NUMERIC>, - StringArray ARRAY<STRING(MAX)>, - TimestampArray ARRAY<TIMESTAMP>, - CommitTimestamp TIMESTAMP OPTIONS (allow_commit_timestamp=true) + CREATE TABLE ${TABLE_NAME} + ( + Key STRING( MAX) NOT NULL, + BytesValue BYTES( MAX), + BoolValue BOOL, + DateValue DATE, + FloatValue FLOAT64, + JsonValue JSON, + IntValue INT64, + NumericValue NUMERIC, + StringValue STRING( MAX), + TimestampValue TIMESTAMP, + BytesArray ARRAY<BYTES(MAX)>, + BoolArray ARRAY<BOOL>, + DateArray ARRAY< DATE >, + FloatArray ARRAY<FLOAT64>, + JsonArray ARRAY<JSON>, + IntArray ARRAY<INT64>, + NumericArray ARRAY< NUMERIC >, + StringArray ARRAY<STRING(MAX)>, + TimestampArray ARRAY< TIMESTAMP >, + CommitTimestamp TIMESTAMP OPTIONS (allow_commit_timestamp= true) ) PRIMARY KEY (Key) - `, - execAfterOperationComplete(done) + ` ); + await googleSqlOperationUpdateDDL.promise(); + const [postgreSqlOperationUpdateDDL] = await PG_DATABASE.updateSchema( + ` + CREATE TABLE ${TABLE_NAME} + ( + "Key" VARCHAR NOT NULL PRIMARY KEY, + "BytesValue" BYTEA, + "BoolValue" BOOL, + "FloatValue" DOUBLE PRECISION, + "IntValue" BIGINT, + "NumericValue" NUMERIC, + "StringValue" VARCHAR, + "TimestampValue" TIMESTAMPTZ + ); + ` + ); + await postgreSqlOperationUpdateDDL.promise(); } }); describe('uneven rows', () => { - it('should allow differently-ordered rows', done => { + const differentlyOrderedRows = (done, dialect) => { const data = [ { Key: generateName('id'),
@@ -252,37 +303,60 @@ describe('Spanner', () => { }, ]; + let table = googleSqlTable; + let database = DATABASE; + let query: ExecuteSqlRequest = { + sql: `SELECT * FROM \`${table.name}\` WHERE Key = @a OR KEY = @b`, + params: { + a: data[0].Key, + b: data[1].Key, + }, + }; + if (dialect === Spanner.POSTGRESQL) { + table = postgreSqlTable; + database = PG_DATABASE; + query = { + sql: `SELECT * FROM ${table.name} WHERE "Key" = $1 OR "Key" = $2`, + params: { + p1: data[0].Key, + p2: data[1].Key, + }, + }; + } + table.insert(data, err => { assert.ifError(err); - DATABASE.run( - { - sql: `SELECT * FROM \`${table.name}\` WHERE Key = @a OR KEY = @b`, - params: { - a: data[0].Key, - b: data[1].Key, - }, - }, - (err, rows) => { - assert.ifError(err); - const row1 = rows![0].toJSON(); - assert.deepStrictEqual(row1.IntValue, data[0].IntValue); -
assert.deepStrictEqual(row1.BoolValue, data[0].BoolValue); + const row1 = rows![0].toJSON(); + assert.deepStrictEqual(row1.IntValue, data[0].IntValue); + assert.deepStrictEqual(row1.BoolValue, data[0].BoolValue); - const row2 = rows![1].toJSON(); - assert.deepStrictEqual(row2.IntValue, data[1].IntValue); - assert.deepStrictEqual(row2.BoolValue, data[1].BoolValue); + const row2 = rows![1].toJSON(); + assert.deepStrictEqual(row2.IntValue, data[1].IntValue); + assert.deepStrictEqual(row2.BoolValue, data[1].BoolValue); - done(); - } - ); + done(); + }); }); + }; + + it('GOOGLE_STANDARD_SQL should allow differently-ordered rows', done => { + differentlyOrderedRows(done, Spanner.GOOGLE_STANDARD_SQL); + }); + + it('POSTGRESQL should allow differently-ordered rows', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + differentlyOrderedRows(done, Spanner.POSTGRESQL); }); }); describe('structs', () => { - it('should correctly decode structs', done => { + it('GOOGLE_STANDARD_SQL should correctly decode structs', done => { const query = 'SELECT ARRAY(SELECT as struct 1, "hello")'; DATABASE.run(query, (err, rows) => {
@@ -321,7 +395,7 @@ describe('Spanner', () => { }); }); - it('should correctly decode structs', done => { + it('GOOGLE_STANDARD_SQL should correctly decode structs', done => { const query = 'SELECT 1 as id, ARRAY(select as struct 2 as id, "hello" as name)';
@@ -373,68 +447,103 @@ describe('Spanner', () => { }); describe('booleans', () => { - it('should write boolean values', done => { - insert({BoolValue: true}, (err, row) => { + const booleanInsert = (done, dialect, value) => { + insert({BoolValue: value}, dialect, (err, row) => { assert.ifError(err); - assert.strictEqual(row.toJSON().BoolValue, true); + assert.strictEqual(row.toJSON().BoolValue, value); done(); }); + }; + + it('GOOGLE_STANDARD_SQL should write boolean values', done => { + booleanInsert(done, Spanner.GOOGLE_STANDARD_SQL, true); }); - it('should write null boolean values', done => { - insert({BoolValue: null}, (err, row) => { - assert.ifError(err); - assert.strictEqual(row.toJSON().BoolValue, null); - done(); - }); + it('POSTGRESQL should write boolean values', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + booleanInsert(done, Spanner.POSTGRESQL, true); + }); + + it('GOOGLE_STANDARD_SQL should write null boolean values', done => { + booleanInsert(done, Spanner.GOOGLE_STANDARD_SQL, null); }); - it('should write empty boolean array values', done => { - insert({BoolArray: []}, (err, row) => { + it('POSTGRESQL should write null boolean values', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + booleanInsert(done, Spanner.POSTGRESQL, null); + }); + + it('GOOGLE_STANDARD_SQL should write empty boolean array values', done => { + insert({BoolArray: []}, Spanner.GOOGLE_STANDARD_SQL, (err, row) => { assert.ifError(err); assert.deepStrictEqual(row.toJSON().BoolArray, []); done(); }); }); - it('should write null boolean array values', done => { - insert({BoolArray: [null]}, (err, row) => { + it('GOOGLE_STANDARD_SQL should write null boolean array values', done => { + insert({BoolArray: [null]}, Spanner.GOOGLE_STANDARD_SQL, (err, row) => { assert.ifError(err); assert.deepStrictEqual(row.toJSON().BoolArray, [null]); done(); }); }); - it('should write boolean array values', done => { - insert({BoolArray: [true, false]}, (err, row) => { - assert.ifError(err); - assert.deepStrictEqual(row.toJSON().BoolArray, [true, false]); - done(); - }); + it('GOOGLE_STANDARD_SQL should write
boolean array values', done => { + insert( + {BoolArray: [true, false]}, + Spanner.GOOGLE_STANDARD_SQL, + (err, row) => { + assert.ifError(err); + assert.deepStrictEqual(row.toJSON().BoolArray, [true, false]); + done(); + } + ); }); }); describe('int64s', () => { - it('should write int64 values', done => { - insert({IntValue: Spanner.int(1234)}, (err, row) => { + const int64Insert = (done, dialect, value) => { + insert({IntValue: value}, dialect, (err, row) => { assert.ifError(err); - assert.deepStrictEqual(row.toJSON().IntValue, 1234); + if (value) { + value = value.valueOf(); + } + assert.deepStrictEqual(row.toJSON().IntValue, value); done(); }); + }; + + it('GOOGLE_STANDARD_SQL should write int64 values', done => { + int64Insert(done, Spanner.GOOGLE_STANDARD_SQL, Spanner.int(1234)); }); - it('should write null int64 values', done => { - insert({IntValue: null}, (err, row) => { - assert.ifError(err); - assert.strictEqual(row.toJSON().IntValue, null); - done(); - }); + it('POSTGRESQL should write int64 values', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + int64Insert(done, Spanner.POSTGRESQL, Spanner.int(1234)); }); - it('should throw for of bounds integers', done => { + it('GOOGLE_STANDARD_SQL should write null int64 values', done => { + int64Insert(done, Spanner.GOOGLE_STANDARD_SQL, null); + }); + + it('POSTGRESQL should write null int64 values', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + int64Insert(done, Spanner.POSTGRESQL, null); + }); + + const int64OutOfBounds = (done, dialect) => { const value = '9223372036854775807'; - insert({IntValue: value}, (err, row) => { + insert({IntValue: value}, dialect, (err, row) => { assert.ifError(err); assert.throws(() => {
@@ -443,40 +552,62 @@ describe('Spanner', () => { done(); }); + }; + + it('GOOGLE_STANDARD_SQL should throw for out of bounds integers', done => { + int64OutOfBounds(done, Spanner.GOOGLE_STANDARD_SQL); + }); + + it('POSTGRESQL should throw for out of bounds integers', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + int64OutOfBounds(done, Spanner.POSTGRESQL); }); - it('should optionally wrap out of bounds integers', done => { + const int64WrapOutOfBounds = (done, dialect) => { const value = '9223372036854775807'; - insert({IntValue: value}, (err, row) => { + insert({IntValue: value}, dialect, (err, row) => { assert.ifError(err); const expected = Spanner.int(value); const actual = row.toJSON({wrapNumbers: true}).IntValue; assert.deepStrictEqual(actual, expected); done(); }); + }; + + it('GOOGLE_STANDARD_SQL should optionally wrap out of bounds integers', done => { + int64WrapOutOfBounds(done, Spanner.GOOGLE_STANDARD_SQL); }); - it('should write empty in64 array values', done => { - insert({IntArray: []}, (err, row) => { + it('POSTGRESQL should optionally wrap out of bounds integers', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + int64WrapOutOfBounds(done, Spanner.POSTGRESQL); + }); + + it('GOOGLE_STANDARD_SQL should write empty int64 array values', done => { + insert({IntArray: []}, Spanner.GOOGLE_STANDARD_SQL, (err, row) => { assert.ifError(err); assert.deepStrictEqual(row.toJSON().IntArray, []); done(); }); }); - it('should write null int64 array values', done => { - insert({IntArray: [null]}, (err, row) => { + it('GOOGLE_STANDARD_SQL should write null int64 array values', done => { + insert({IntArray: [null]}, Spanner.GOOGLE_STANDARD_SQL, (err, row) => { assert.ifError(err); assert.deepStrictEqual(row.toJSON().IntArray, [null]); done(); }); }); -
it('should write int64 array values', done => { + it('GOOGLE_STANDARD_SQL should write int64 array values', done => { const values = [1, 2, 3]; - insert({IntArray: values}, (err, row) => { + insert({IntArray: values}, Spanner.GOOGLE_STANDARD_SQL, (err, row) => { assert.ifError(err); assert.deepStrictEqual(row.toJSON().IntArray, values); done(); @@ -485,221 +616,356 @@ describe('Spanner', () => { }); describe('float64s', () => { - it('should write float64 values', done => { - insert({FloatValue: 8.2}, (err, row) => { + const float64Insert = (done, dialect, value) => { + insert({FloatValue: value}, dialect, (err, row) => { assert.ifError(err); - assert.deepStrictEqual(row.toJSON().FloatValue, 8.2); + if (typeof value === 'object' && value !== null) { + value = value.value; + } + assert.deepStrictEqual(row.toJSON().FloatValue, value); done(); }); + }; + + it('GOOGLE_STANDARD_SQL should write float64 values', done => { + float64Insert(done, Spanner.GOOGLE_STANDARD_SQL, 8.2); }); - it('should write null float64 values', done => { - insert({FloatValue: null}, (err, row) => { - assert.ifError(err); - assert.strictEqual(row.toJSON().FloatValue, null); - done(); - }); + it('POSTGRESQL should write float64 values', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + float64Insert(done, Spanner.POSTGRESQL, 8.2); }); - it('should accept a Float object with an Int-like value', done => { - insert({FloatValue: Spanner.float(8)}, (err, row) => { - assert.ifError(err); - assert.deepStrictEqual(row.toJSON().FloatValue, 8); - done(); - }); + it('GOOGLE_STANDARD_SQL should write null float64 values', done => { + float64Insert(done, Spanner.GOOGLE_STANDARD_SQL, null); }); - it('should handle Infinity', done => { - insert({FloatValue: Infinity}, (err, row) => { - assert.ifError(err); - assert.deepStrictEqual(row.toJSON().FloatValue, Infinity); - done(); - }); + it('POSTGRESQL should write null float64 values', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + float64Insert(done, Spanner.POSTGRESQL, null); }); - it('should handle -Infinity', done => { - insert({FloatValue: -Infinity}, (err, row) => { - assert.ifError(err); - assert.deepStrictEqual(row.toJSON().FloatValue, -Infinity); - done(); - }); + it('GOOGLE_STANDARD_SQL should accept a Float object with an Int-like value', done => { + float64Insert(done, Spanner.GOOGLE_STANDARD_SQL, Spanner.float(8)); }); - it('should handle NaN', done => { - insert({FloatValue: NaN}, (err, row) => { - assert.ifError(err); - assert(isNaN(row.toJSON().FloatValue)); - done(); - }); + it('POSTGRESQL should accept a Float object with an Int-like value', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + float64Insert(done, Spanner.POSTGRESQL, Spanner.float(8)); + }); + + it('GOOGLE_STANDARD_SQL should handle Infinity', done => { + float64Insert(done, Spanner.GOOGLE_STANDARD_SQL, Infinity); + }); + + it('POSTGRESQL should handle Infinity', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + float64Insert(done, Spanner.POSTGRESQL, Infinity); }); - it('should write empty float64 array values', done => { - insert({FloatArray: []}, (err, row) => { + it('GOOGLE_STANDARD_SQL should handle -Infinity', done => { + float64Insert(done, Spanner.GOOGLE_STANDARD_SQL, -Infinity); + }); + + it('POSTGRESQL should handle -Infinity', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + float64Insert(done, Spanner.POSTGRESQL, -Infinity); + }); + + it('GOOGLE_STANDARD_SQL should handle NaN', done => { + 
float64Insert(done, Spanner.GOOGLE_STANDARD_SQL, NaN); + }); + + it('POSTGRESQL should handle NaN', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + float64Insert(done, Spanner.POSTGRESQL, NaN); + }); + + it('GOOGLE_STANDARD_SQL should write empty float64 array values', done => { + insert({FloatArray: []}, Spanner.GOOGLE_STANDARD_SQL, (err, row) => { assert.ifError(err); assert.deepStrictEqual(row.toJSON().FloatArray, []); done(); }); }); - it('should write null float64 array values', done => { - insert({FloatArray: [null]}, (err, row) => { - assert.ifError(err); - assert.deepStrictEqual(row.toJSON().FloatArray, [null]); - done(); - }); + it('GOOGLE_STANDARD_SQL should write null float64 array values', done => { + insert( + {FloatArray: [null]}, + Spanner.GOOGLE_STANDARD_SQL, + (err, row) => { + assert.ifError(err); + assert.deepStrictEqual(row.toJSON().FloatArray, [null]); + done(); + } + ); }); - it('should write float64 array values', done => { + it('GOOGLE_STANDARD_SQL should write float64 array values', done => { const values = [1.2, 2.3, 3.4]; - insert({FloatArray: values}, (err, row) => { - assert.ifError(err); - assert.deepStrictEqual(row.toJSON().FloatArray, values); - done(); - }); + insert( + {FloatArray: values}, + Spanner.GOOGLE_STANDARD_SQL, + (err, row) => { + assert.ifError(err); + assert.deepStrictEqual(row.toJSON().FloatArray, values); + done(); + } + ); }); }); describe('numerics', () => { - it('should write numeric values', done => { - const value = Spanner.numeric('3.141592653'); - - insert({NumericValue: value}, (err, row) => { + const numericInsert = (done, dialect, value) => { + insert({NumericValue: value}, dialect, (err, row) => { assert.ifError(err); assert.deepStrictEqual(row.toJSON().NumericValue, value); done(); }); + }; + + it('GOOGLE_STANDARD_SQL should write numeric values', done => { + numericInsert( + done, + Spanner.GOOGLE_STANDARD_SQL, + Spanner.numeric('3.141592653') + ); }); - it('should write null numeric values', done => { - insert({NumericValue: null}, (err, row) => { - assert.ifError(err); - assert.strictEqual(row.toJSON().NumericValue, null); - done(); - }); + it('POSTGRESQL should write numeric values', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + numericInsert( + done, + Spanner.POSTGRESQL, + Spanner.pgNumeric('3.141592653') + ); + }); + + it('GOOGLE_STANDARD_SQL should write null numeric values', done => { + numericInsert(done, Spanner.GOOGLE_STANDARD_SQL, null); + }); + + it('POSTGRESQL should write null numeric values', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + numericInsert(done, Spanner.POSTGRESQL, null); + }); + + it('POSTGRESQL should bind NaN', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + numericInsert(done, Spanner.POSTGRESQL, Spanner.pgNumeric('NaN')); }); - it('should throw for out of bounds values', done => { - insert({NumericValue: Spanner.numeric('3.1415926535')}, err => { + const numericInsertOutOfBounds = (done, dialect, value) => { + insert({NumericValue: value}, dialect, err => { assert.strictEqual(err.code, grpc.status.FAILED_PRECONDITION); done(); }); + }; + + it('GOOGLE_STANDARD_SQL should throw for out of bounds values', done => { + numericInsertOutOfBounds( + done, + Spanner.GOOGLE_STANDARD_SQL, + Spanner.numeric('3.1415926535') + ); }); - it('should write empty numeric array values', done => { - insert({NumericArray: []}, (err, row) => { + it('POSTGRESQL should throw for out of bounds values', function (done) { + if 
(IS_EMULATOR_ENABLED) { + this.skip(); + } + numericInsertOutOfBounds( + done, + Spanner.POSTGRESQL, + Spanner.pgNumeric('1e131072') + ); + }); + + it('GOOGLE_STANDARD_SQL should write empty numeric array values', done => { + insert({NumericArray: []}, Spanner.GOOGLE_STANDARD_SQL, (err, row) => { assert.ifError(err); assert.deepStrictEqual(row.toJSON().NumericArray, []); done(); }); }); - it('should write null numeric array values', done => { - insert({NumericArray: [null]}, (err, row) => { - assert.ifError(err); - assert.deepStrictEqual(row.toJSON().NumericArray, [null]); - done(); - }); + it('GOOGLE_STANDARD_SQL should write null numeric array values', done => { + insert( + {NumericArray: [null]}, + Spanner.GOOGLE_STANDARD_SQL, + (err, row) => { + assert.ifError(err); + assert.deepStrictEqual(row.toJSON().NumericArray, [null]); + done(); + } + ); }); - it('should write numeric array values', done => { + it('GOOGLE_STANDARD_SQL should write numeric array values', done => { const values = [ Spanner.numeric('-99999999999999999999999999999.999999999'), Spanner.numeric('3.141592653'), Spanner.numeric('99999999999999999999999999999.999999999'), ]; - insert({NumericArray: values}, (err, row) => { - assert.ifError(err); - assert.deepStrictEqual(row.toJSON().NumericArray, values); - done(); - }); + insert( + {NumericArray: values}, + Spanner.GOOGLE_STANDARD_SQL, + (err, row) => { + assert.ifError(err); + assert.deepStrictEqual(row.toJSON().NumericArray, values); + done(); + } + ); }); }); describe('strings', () => { - it('should write string values', done => { - insert({StringValue: 'abc'}, (err, row) => { + const stringInsert = (done, dialect, value) => { + insert({StringValue: value}, dialect, (err, row) => { assert.ifError(err); - assert.strictEqual(row.toJSON().StringValue, 'abc'); + assert.strictEqual(row.toJSON().StringValue, value); done(); }); + }; + + it('GOOGLE_STANDARD_SQL should write string values', done => { + stringInsert(done, Spanner.GOOGLE_STANDARD_SQL, 'abc'); }); - it('should write null string values', done => { - insert({StringValue: null}, (err, row) => { - assert.ifError(err); - assert.strictEqual(row.toJSON().StringValue, null); - done(); - }); + it('POSTGRESQL should write string values', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + stringInsert(done, Spanner.POSTGRESQL, 'abc'); }); - it('should write empty string array values', done => { - insert({StringArray: []}, (err, row) => { + it('GOOGLE_STANDARD_SQL should write null string values', done => { + stringInsert(done, Spanner.GOOGLE_STANDARD_SQL, null); + }); + + it('POSTGRESQL should write null string values', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + stringInsert(done, Spanner.POSTGRESQL, null); + }); + + it('GOOGLE_STANDARD_SQL should write empty string array values', done => { + insert({StringArray: []}, Spanner.GOOGLE_STANDARD_SQL, (err, row) => { assert.ifError(err); assert.deepStrictEqual(row.toJSON().StringArray, []); done(); }); }); - it('should write null string array values', done => { - insert({StringArray: [null]}, (err, row) => { - assert.ifError(err); - assert.deepStrictEqual(row.toJSON().StringArray, [null]); - done(); - }); + it('GOOGLE_STANDARD_SQL should write null string array values', done => { + insert( + {StringArray: [null]}, + Spanner.GOOGLE_STANDARD_SQL, + (err, row) => { + assert.ifError(err); + assert.deepStrictEqual(row.toJSON().StringArray, [null]); + done(); + } + ); }); - it('should write string array values', done => { - 
insert({StringArray: ['abc', 'def']}, (err, row) => { - assert.ifError(err); - assert.deepStrictEqual(row.toJSON().StringArray, ['abc', 'def']); - done(); - }); + it('GOOGLE_STANDARD_SQL should write string array values', done => { + insert( + {StringArray: ['abc', 'def']}, + Spanner.GOOGLE_STANDARD_SQL, + (err, row) => { + assert.ifError(err); + assert.deepStrictEqual(row.toJSON().StringArray, ['abc', 'def']); + done(); + } + ); }); }); describe('bytes', () => { - it('should write bytes values', done => { - insert({BytesValue: Buffer.from('abc')}, (err, row) => { + const bytesInsert = (done, dialect, value) => { + insert({BytesValue: value}, dialect, (err, row) => { assert.ifError(err); - assert.deepStrictEqual(row.toJSON().BytesValue, Buffer.from('abc')); + assert.deepStrictEqual(row.toJSON().BytesValue, value); done(); }); + }; + + it('GOOGLE_STANDARD_SQL should write bytes values', done => { + bytesInsert(done, Spanner.GOOGLE_STANDARD_SQL, Buffer.from('abc')); }); - it('should write null bytes values', done => { - insert({BytesValue: null}, (err, row) => { - assert.ifError(err); - assert.strictEqual(row.toJSON().BytesValue, null); - done(); - }); + it('POSTGRESQL should write bytes values', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + bytesInsert(done, Spanner.POSTGRESQL, Buffer.from('abc')); + }); + + it('GOOGLE_STANDARD_SQL should write null bytes values', done => { + bytesInsert(done, Spanner.GOOGLE_STANDARD_SQL, null); }); - it('should write empty bytes array values', done => { - insert({BytesArray: []}, (err, row) => { + it('POSTGRESQL should write null bytes values', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + bytesInsert(done, Spanner.POSTGRESQL, null); + }); + + it('GOOGLE_STANDARD_SQL should write empty bytes array values', done => { + insert({BytesArray: []}, Spanner.GOOGLE_STANDARD_SQL, (err, row) => { assert.ifError(err); assert.deepStrictEqual(row.toJSON().BytesArray, []); done(); }); }); - it('should write null bytes array values', done => { - insert({BytesArray: [null]}, (err, row) => { - assert.ifError(err); - assert.deepStrictEqual(row.toJSON().BytesArray, [null]); - done(); - }); + it('GOOGLE_STANDARD_SQL should write null bytes array values', done => { + insert( + {BytesArray: [null]}, + Spanner.GOOGLE_STANDARD_SQL, + (err, row) => { + assert.ifError(err); + assert.deepStrictEqual(row.toJSON().BytesArray, [null]); + done(); + } + ); }); - it('should write bytes array values', done => { + it('GOOGLE_STANDARD_SQL should write bytes array values', done => { const values = [Buffer.from('a'), Buffer.from('b')]; - insert({BytesArray: values}, (err, row) => { - assert.ifError(err); - assert.deepStrictEqual(row.toJSON().BytesArray, values); - done(); - }); + insert( + {BytesArray: values}, + Spanner.GOOGLE_STANDARD_SQL, + (err, row) => { + assert.ifError(err); + assert.deepStrictEqual(row.toJSON().BytesArray, values); + done(); + } + ); }); }); @@ -710,44 +976,49 @@ describe('Spanner', () => { } }); - it('should write json values', done => { - insert({JsonValue: {key1: 'value1', key2: 'value2'}}, (err, row) => { - assert.ifError(err); - assert.deepStrictEqual(row.toJSON().JsonValue, { - key1: 'value1', - key2: 'value2', - }); - done(); - }); + it('GOOGLE_STANDARD_SQL should write json values', done => { + insert( + {JsonValue: {key1: 'value1', key2: 'value2'}}, + Spanner.GOOGLE_STANDARD_SQL, + (err, row) => { + assert.ifError(err); + assert.deepStrictEqual(row.toJSON().JsonValue, { + key1: 'value1', + key2: 'value2', + }); + 
done(); + } + ); }); - it('should write null json values', done => { - insert({JsonValue: null}, (err, row) => { + it('GOOGLE_STANDARD_SQL should write null json values', done => { + insert({JsonValue: null}, Spanner.GOOGLE_STANDARD_SQL, (err, row) => { assert.ifError(err); assert.strictEqual(row.toJSON().JsonValue, null); done(); }); }); - it('should write empty json array values', done => { - insert({JsonArray: []}, (err, row) => { + it('GOOGLE_STANDARD_SQL should write empty json array values', done => { + insert({JsonArray: []}, Spanner.GOOGLE_STANDARD_SQL, (err, row) => { assert.ifError(err); assert.deepStrictEqual(row.toJSON().JsonArray, []); done(); }); }); - it('should write null json array values', done => { - insert({JsonArray: [null]}, (err, row) => { + it('GOOGLE_STANDARD_SQL should write null json array values', done => { + insert({JsonArray: [null]}, Spanner.GOOGLE_STANDARD_SQL, (err, row) => { assert.ifError(err); assert.deepStrictEqual(row.toJSON().JsonArray, [null]); done(); }); }); - it('should write json array values', done => { + it('GOOGLE_STANDARD_SQL should write json array values', done => { insert( {JsonArray: [{key1: 'value1'}, {key2: 'value2'}]}, + Spanner.GOOGLE_STANDARD_SQL, (err, row) => { assert.ifError(err); assert.deepStrictEqual(row.toJSON().JsonArray, [ @@ -761,91 +1032,125 @@ describe('Spanner', () => { }); describe('timestamps', () => { - it('should write timestamp values', done => { + const timestampInsert = (done, dialect) => { const date = Spanner.timestamp(); - insert({TimestampValue: date}, (err, row) => { + insert({TimestampValue: date}, dialect, (err, row) => { assert.ifError(err); const time = row.toJSON().TimestampValue.getTime(); assert.strictEqual(time, date.getTime()); done(); }); + }; + + it('GOOGLE_STANDARD_SQL should write timestamp values', done => { + timestampInsert(done, Spanner.GOOGLE_STANDARD_SQL); }); - it('should write null timestamp values', done => { - insert({TimestampValue: null}, (err, row) => { + it('POSTGRESQL should write timestamp values', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + timestampInsert(done, Spanner.POSTGRESQL); + }); + + const timestampInsertNull = (done, dialect) => { + insert({TimestampValue: null}, dialect, (err, row) => { assert.ifError(err); assert.strictEqual(row.toJSON().TimestampValue, null); done(); }); + }; + + it('GOOGLE_STANDARD_SQL should write null timestamp values', done => { + timestampInsertNull(done, Spanner.GOOGLE_STANDARD_SQL); }); - it('should write empty timestamp array values', done => { - insert({TimestampArray: []}, (err, row) => { - assert.ifError(err); - assert.deepStrictEqual(row.toJSON().TimestampArray, []); - done(); - }); + it('POSTGRESQL should write null timestamp values', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + timestampInsertNull(done, Spanner.POSTGRESQL); }); - it('should write null timestamp array values', done => { - insert({TimestampArray: [null]}, (err, row) => { - assert.ifError(err); - assert.deepStrictEqual(row.toJSON().TimestampArray, [null]); - done(); - }); + it('GOOGLE_STANDARD_SQL should write empty timestamp array values', done => { + insert( + {TimestampArray: []}, + Spanner.GOOGLE_STANDARD_SQL, + (err, row) => { + assert.ifError(err); + assert.deepStrictEqual(row.toJSON().TimestampArray, []); + done(); + } + ); }); - it('should write timestamp array values', done => { + it('GOOGLE_STANDARD_SQL should write null timestamp array values', done => { + insert( + {TimestampArray: [null]}, + 
Spanner.GOOGLE_STANDARD_SQL, + (err, row) => { + assert.ifError(err); + assert.deepStrictEqual(row.toJSON().TimestampArray, [null]); + done(); + } + ); + }); + + it('GOOGLE_STANDARD_SQL should write timestamp array values', done => { const values = [Spanner.timestamp(), Spanner.timestamp('3-3-1933')]; - insert({TimestampArray: values}, (err, row) => { - assert.ifError(err); - assert.deepStrictEqual(row.toJSON().TimestampArray, values); - done(); - }); + insert( + {TimestampArray: values}, + Spanner.GOOGLE_STANDARD_SQL, + (err, row) => { + assert.ifError(err); + assert.deepStrictEqual(row.toJSON().TimestampArray, values); + done(); + } + ); }); }); describe('dates', () => { - it('should write date values', done => { + it('GOOGLE_STANDARD_SQL should write date values', done => { const date = Spanner.date(); - insert({DateValue: date}, (err, row) => { + insert({DateValue: date}, Spanner.GOOGLE_STANDARD_SQL, (err, row) => { assert.ifError(err); assert.deepStrictEqual(Spanner.date(row.toJSON().DateValue), date); done(); }); }); - it('should write null date values', done => { - insert({DateValue: null}, (err, row) => { + it('GOOGLE_STANDARD_SQL should write null date values', done => { + insert({DateValue: null}, Spanner.GOOGLE_STANDARD_SQL, (err, row) => { assert.ifError(err); assert.strictEqual(row.toJSON().DateValue, null); done(); }); }); - it('should write empty date array values', done => { - insert({DateArray: []}, (err, row) => { + it('GOOGLE_STANDARD_SQL should write empty date array values', done => { + insert({DateArray: []}, Spanner.GOOGLE_STANDARD_SQL, (err, row) => { assert.ifError(err); assert.deepStrictEqual(row.toJSON().DateArray, []); done(); }); }); - it('should write null date array values', done => { - insert({DateArray: [null]}, (err, row) => { + it('GOOGLE_STANDARD_SQL should write null date array values', done => { + insert({DateArray: [null]}, Spanner.GOOGLE_STANDARD_SQL, (err, row) => { assert.ifError(err); assert.deepStrictEqual(row.toJSON().DateArray, [null]); done(); }); }); - it('should write date array values', done => { + it('GOOGLE_STANDARD_SQL should write date array values', done => { const values = [Spanner.date(), Spanner.date('3-3-1933')]; - insert({DateArray: values}, (err, row) => { + insert({DateArray: values}, Spanner.GOOGLE_STANDARD_SQL, (err, row) => { assert.ifError(err); const {DateArray} = row.toJSON(); assert.deepStrictEqual(DateArray, values); @@ -855,26 +1160,41 @@ describe('Spanner', () => { }); describe('commit timestamp', () => { - it('should accept the commit timestamp placeholder', done => { + it('GOOGLE_STANDARD_SQL should accept the commit timestamp placeholder', done => { const data = {CommitTimestamp: Spanner.COMMIT_TIMESTAMP}; - insert(data, (err, row, {commitTimestamp}) => { - assert.ifError(err); + insert( + data, + Spanner.GOOGLE_STANDARD_SQL, + (err, row, {commitTimestamp}) => { + assert.ifError(err); - const timestampFromCommit = Spanner.timestamp(commitTimestamp); - const timestampFromRead = row.toJSON().CommitTimestamp; + const timestampFromCommit = Spanner.timestamp(commitTimestamp); + const timestampFromRead = row.toJSON().CommitTimestamp; - assert.deepStrictEqual(timestampFromCommit, timestampFromRead); - done(); - }); + assert.deepStrictEqual(timestampFromCommit, timestampFromRead); + done(); + } + ); }); }); - it('should throw an error for incorrect value types', done => { + const incorrectValueType = (done, table) => { table.insert({BoolValue: 'abc'}, err => { assert(err); done(); }); + }; + + it('GOOGLE_STANDARD_SQL should 
throw an error for incorrect value types', done => { + incorrectValueType(done, googleSqlTable); + }); + + it('POSTGRESQL should throw an error for incorrect value types', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + incorrectValueType(done, postgreSqlTable); }); }); @@ -1050,23 +1370,53 @@ describe('Spanner', () => { describe('Databases', () => { const TABLE_NAME = 'SingersTest'; - it('should auto create a database', done => { - const database = instance.database(generateName('database')); + const autoCreateDatabase = (done, databaseId) => { + const database = instance.database(generateName(databaseId)); database.get({autoCreate: true} as GetDatabaseConfig, err => { assert.ifError(err); RESOURCES_TO_CLEAN.push(database); database.getMetadata(done); }); + }; + + it('GOOGLE_STANDARD_SQL should auto create a database', done => { + autoCreateDatabase(done, 'database'); }); - it('should have created the database', done => { - DATABASE.getMetadata((err, metadata) => { + it('POSTGRESQL should auto create a database', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + autoCreateDatabase(done, 'pg-db'); + }); + + const createDatabase = (done, database, dialect) => { + database.getMetadata((err, metadata) => { assert.ifError(err); - assert.strictEqual(metadata!.name, DATABASE.formattedName_); + assert.strictEqual(metadata!.name, database.formattedName_); assert.strictEqual(metadata!.state, 'READY'); + if (IS_EMULATOR_ENABLED) { + assert.strictEqual( + metadata!.databaseDialect, + 'DATABASE_DIALECT_UNSPECIFIED' + ); + } else { + assert.strictEqual(metadata!.databaseDialect, dialect); + } done(); }); + }; + + it('GOOGLE_STANDARD_SQL should have created the database', done => { + createDatabase(done, DATABASE, 'GOOGLE_STANDARD_SQL'); + }); + + it('POSTGRESQL should have created the database', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + createDatabase(done, PG_DATABASE, 'POSTGRESQL'); }); it('should list the databases from an instance', done => { @@ -1116,37 +1466,65 @@ describe('Spanner', () => { }); }); - it('should create a table', done => { - const createTableStatement = ` - CREATE TABLE ${TABLE_NAME} ( - SingerId INT64 NOT NULL, - FirstName STRING(1024), - LastName STRING(1024), - SingerInfo BYTES(MAX), - ) PRIMARY KEY(SingerId)`; - - DATABASE.updateSchema( + const createTable = (done, database, dialect, createTableStatement) => { + database.updateSchema( [createTableStatement], execAfterOperationComplete(err => { assert.ifError(err); - function replaceNewLinesAndSpacing(str) { - return str.replace(/\n\s*/g, '').replace(/\s+/g, ' '); + function replaceNewLinesAndSpacing(str, dialect) { + const schema = str.replace(/\n\s*/g, '').replace(/\s+/g, ' '); + if (dialect === Spanner.GOOGLE_STANDARD_SQL) { + return schema; + } else { + return schema.toLowerCase(); + } } - DATABASE.getSchema((err, statements) => { + database.getSchema((err, statements) => { assert.ifError(err); assert.ok( statements!.some( s => - replaceNewLinesAndSpacing(s) === - replaceNewLinesAndSpacing(createTableStatement) + replaceNewLinesAndSpacing(s, dialect) === + replaceNewLinesAndSpacing(createTableStatement, dialect) ) ); done(); }); }) ); + }; + + it('GOOGLE_STANDARD_SQL should create a table', done => { + const createTableStatement = ` + CREATE TABLE ${TABLE_NAME} ( + SingerId INT64 NOT NULL, + FirstName STRING(1024), + LastName STRING(1024), + SingerInfo BYTES(MAX), + ) PRIMARY KEY(SingerId)`; + createTable( + done, + DATABASE, + Spanner.GOOGLE_STANDARD_SQL, + 
createTableStatement + ); + }); + + it('POSTGRESQL should create a table', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const createTableStatement = ` + CREATE TABLE ${TABLE_NAME} ( + SingerId BIGINT NOT NULL, + FirstName CHARACTER VARYING, + LastName CHARACTER VARYING, + SingerInfo BYTEA, + PRIMARY KEY(SingerId) + )`; + createTable(done, PG_DATABASE, Spanner.POSTGRESQL, createTableStatement); }); it('should list database operations on an instance', async function () { @@ -1176,16 +1554,13 @@ describe('Spanner', () => { assert.strictEqual(createMeta.database, databaseFullName); }); - it('should list database operations on a database', async function () { - if (IS_EMULATOR_ENABLED) { - this.skip(); - } + const listDatabaseOperation = async function (database) { // Look up the database full name from the metadata to expand any {{projectId}} tokens. - const [databaseMetadata] = await DATABASE.getMetadata(); + const [databaseMetadata] = await database.getMetadata(); const databaseFullName = databaseMetadata.name; // List operations. - const [databaseOperations] = await DATABASE.getOperations(); + const [databaseOperations] = await database.getOperations(); // Validate operation has at least the create operation for the database. assert.ok(databaseOperations.length > 0); @@ -1198,19 +1573,46 @@ describe('Spanner', () => { databaseCreateOperation!.metadata!.value! as Uint8Array ); assert.strictEqual(createMeta.database, databaseFullName); + }; + + it('GOOGLE_STANDARD_SQL should list database operations on a database', async function () { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + await listDatabaseOperation(DATABASE); + }); + + it('POSTGRESQL should list database operations on a database', async function () { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + await listDatabaseOperation(PG_DATABASE); }); }); describe('Backups', () => { - let database1: Database; - let database2: Database; - let restoreDatabase: Database; + const SKIP_POSTGRESQL_BACKUP_TESTS = true; + + let googleSqlDatabase1: Database; + let googleSqlDatabase2: Database; + let googleSqlRestoreDatabase: Database; + + let postgreSqlDatabase1: Database; + let postgreSqlDatabase2: Database; + let postgreSqlRestoreDatabase: Database; + + let googleSqlBackup1: Backup; + let googleSqlBackup2: Backup; - let backup1: Backup; - let backup2: Backup; + let postgreSqlBackup1: Backup; + let postgreSqlBackup2: Backup; + + const googleSqlBackup1Name = generateName('backup'); + const googleSqlBackup2Name = generateName('backup'); + + const postgreSqlBackup1Name = generateName('pg-backup'); + const postgreSqlBackup2Name = generateName('pg-backup'); - const backup1Name = generateName('backup'); - const backup2Name = generateName('backup'); const backupExpiryDate = futureDateByHours(12); const backupExpiryPreciseDate = Spanner.timestamp(backupExpiryDate); @@ -1221,77 +1623,156 @@ describe('Spanner', () => { if (SKIP_BACKUPS === 'true') { this.skip(); } - database1 = DATABASE; + googleSqlDatabase1 = DATABASE; + postgreSqlDatabase1 = PG_DATABASE; - await database1.table(TABLE_NAME).insert({ + await googleSqlDatabase1.table(TABLE_NAME).insert({ + SingerId: generateName('id'), + Name: generateName('name'), + }); + + await postgreSqlDatabase1.table(TABLE_NAME).insert({ SingerId: generateName('id'), Name: generateName('name'), }); // Create a second database since only one pending backup can be created // per database. 
- database2 = instance.database(generateName('database')); - const [, database2CreateOperation] = await database2.create({ - schema: ` + googleSqlDatabase2 = instance.database(generateName('database')); + const [, googleSqlDatabase2CreateOperation] = + await googleSqlDatabase2.create({ + schema: ` CREATE TABLE Albums ( AlbumId STRING(1024) NOT NULL, AlbumTitle STRING(1024) NOT NULL, ) PRIMARY KEY(AlbumId)`, - gaxOptions: GAX_OPTIONS, - }); - await database2CreateOperation.promise(); - RESOURCES_TO_CLEAN.push(database2); + gaxOptions: GAX_OPTIONS, + }); + await googleSqlDatabase2CreateOperation.promise(); + RESOURCES_TO_CLEAN.push(googleSqlDatabase2); + + if (!SKIP_POSTGRESQL_BACKUP_TESTS) { + postgreSqlDatabase2 = instance.database(generateName('pg-db')); + const [, postgreSqlDatabase2CreateOperation] = + await postgreSqlDatabase2.create({ + databaseDialect: Spanner.POSTGRESQL, + gaxOptions: GAX_OPTIONS, + }); + await postgreSqlDatabase2CreateOperation.promise(); + const schema = [ + `CREATE TABLE Albums ( + AlbumId VARCHAR NOT NULL PRIMARY KEY, + AlbumTitle VARCHAR NOT NULL + );`, + ]; + const [postgreSqlDatabase2UpdateOperation] = + await postgreSqlDatabase2.updateSchema(schema); + await postgreSqlDatabase2UpdateOperation.promise(); + RESOURCES_TO_CLEAN.push(postgreSqlDatabase2); + } // Initialize a database instance to restore to. - restoreDatabase = instance.database(generateName('database')); + googleSqlRestoreDatabase = instance.database(generateName('database')); + postgreSqlRestoreDatabase = instance.database(generateName('pg-db')); // Create backups. - backup1 = instance.backup(backup1Name); - backup2 = instance.backup(backup2Name); - const [, backup1Operation] = await backup1.create({ - databasePath: database1.formattedName_, + googleSqlBackup1 = instance.backup(googleSqlBackup1Name); + googleSqlBackup2 = instance.backup(googleSqlBackup2Name); + const [, googleSqlBackup1Operation] = await googleSqlBackup1.create({ + databasePath: googleSqlDatabase1.formattedName_, expireTime: backupExpiryDate, gaxOptions: GAX_OPTIONS, }); - const [, backup2Operation] = await backup2.create({ - databasePath: database2.formattedName_, + const [, googleSqlBackup2Operation] = await googleSqlBackup2.create({ + databasePath: googleSqlDatabase2.formattedName_, expireTime: backupExpiryDate, gaxOptions: GAX_OPTIONS, }); - assert.strictEqual( - backup1Operation.metadata!.name, - `${instance.formattedName_}/backups/${backup1Name}` - ); - if ('database' in backup1Operation.metadata) { + if ('database' in googleSqlBackup1Operation.metadata) { assert.strictEqual( - backup1Operation.metadata!.database, - database1.formattedName_ + googleSqlBackup1Operation.metadata!.name, + `${instance.formattedName_}/backups/${googleSqlBackup1Name}` ); } - assert.strictEqual( - backup2Operation.metadata!.name, - `${instance.formattedName_}/backups/${backup2Name}` - ); - if ('database' in backup2Operation.metadata) { + if ('database' in googleSqlBackup1Operation.metadata) { assert.strictEqual( - backup2Operation.metadata!.database, - database2.formattedName_ + googleSqlBackup1Operation.metadata!.database, + googleSqlDatabase1.formattedName_ + ); + } + + if ('database' in googleSqlBackup2Operation.metadata) { + assert.strictEqual( + googleSqlBackup2Operation.metadata!.name, + `${instance.formattedName_}/backups/${googleSqlBackup2Name}` + ); + } + if ('database' in googleSqlBackup2Operation.metadata) { + assert.strictEqual( + googleSqlBackup2Operation.metadata!.database, + googleSqlDatabase2.formattedName_ ); } // Wait for 
backups to finish. - await backup1Operation.promise(); - await backup2Operation.promise(); - RESOURCES_TO_CLEAN.push(...[backup1, backup2]); + await googleSqlBackup1Operation.promise(); + await googleSqlBackup2Operation.promise(); + RESOURCES_TO_CLEAN.push(...[googleSqlBackup1, googleSqlBackup2]); + + if (!SKIP_POSTGRESQL_BACKUP_TESTS) { + postgreSqlBackup1 = instance.backup(postgreSqlBackup1Name); + postgreSqlBackup2 = instance.backup(postgreSqlBackup2Name); + const [, postgreSqlBackup1Operation] = await postgreSqlBackup1.create({ + databasePath: postgreSqlDatabase1.formattedName_, + expireTime: backupExpiryDate, + gaxOptions: GAX_OPTIONS, + }); + const [, postgreSqlBackup2Operation] = await postgreSqlBackup2.create({ + databasePath: postgreSqlDatabase2.formattedName_, + expireTime: backupExpiryDate, + gaxOptions: GAX_OPTIONS, + }); + + if ('database' in postgreSqlBackup1Operation.metadata) { + assert.strictEqual( + postgreSqlBackup1Operation.metadata!.name, + `${instance.formattedName_}/backups/${postgreSqlBackup1Name}` + ); + } + if ('database' in postgreSqlBackup1Operation.metadata) { + assert.strictEqual( + postgreSqlBackup1Operation.metadata!.database, + postgreSqlDatabase1.formattedName_ + ); + } + if ('database' in postgreSqlBackup2Operation.metadata) { + assert.strictEqual( + postgreSqlBackup2Operation.metadata!.name, + `${instance.formattedName_}/backups/${postgreSqlBackup2Name}` + ); + } + if ('database' in postgreSqlBackup2Operation.metadata) { + assert.strictEqual( + postgreSqlBackup2Operation.metadata!.database, + postgreSqlDatabase2.formattedName_ + ); + } + + // Wait for backups to finish. + await postgreSqlBackup1Operation.promise(); + await postgreSqlBackup2Operation.promise(); + + RESOURCES_TO_CLEAN.push(...[postgreSqlBackup1, postgreSqlBackup2]); + } }); function futureDateByHours(futureHours: number): number { return Date.now() + 1000 * 60 * 60 * futureHours; } - it('should have completed a backup', async () => { + const completedBackup = async (backup1, backup1Name, database1) => { // Validate backup has completed. const [backupInfo] = await backup1.getMetadata(); assert.strictEqual(backupInfo.state, 'READY'); @@ -1317,9 +1798,25 @@ describe('Spanner', () => { ); const exists = await backup1.exists(); assert.strictEqual(exists, true); + }; + + it('GOOGLE_STANDARD_SQL should have completed a backup', async () => { + await completedBackup( + googleSqlBackup1, + googleSqlBackup1Name, + googleSqlDatabase1 + ); }); - it('should return error for backup expiration time in the past', async () => { + it.skip('POSTGRESQL should have completed a backup', async () => { + await completedBackup( + postgreSqlBackup1, + postgreSqlBackup1Name, + postgreSqlDatabase1 + ); + }); + + const pastBackupExpirationTimeError = async database1 => { // Create backup. 
const backupName = generateName('backup'); const backupExpiryDate = futureDateByHours(-12); @@ -1336,6 +1833,14 @@ describe('Spanner', () => { // Expect to get invalid argument error indicating the expiry date assert.strictEqual(err.code, grpc.status.INVALID_ARGUMENT); } + }; + + it('GOOGLE_STANDARD_SQL should return error for backup expiration time in the past', async () => { + await pastBackupExpirationTimeError(googleSqlDatabase1); + }); + + it.skip('POSTGRESQL should return error for backup expiration time in the past', async () => { + await pastBackupExpirationTimeError(postgreSqlDatabase1); }); it('should return false for a backup that does not exist', async () => { @@ -1351,8 +1856,17 @@ describe('Spanner', () => { const [backups] = await instance.getBackups(); assert.ok(backups.length > 0); assert.ok( - backups.find(backup => backup.formattedName_ === backup1.formattedName_) + backups.find( + backup => backup.formattedName_ === googleSqlBackup1.formattedName_ + ) ); + if (!IS_EMULATOR_ENABLED && !SKIP_POSTGRESQL_BACKUP_TESTS) { + assert.ok( + backups.find( + backup => backup.formattedName_ === postgreSqlBackup1.formattedName_ + ) + ); + } }); it('should list backups with pagination', async () => { @@ -1365,23 +1879,44 @@ describe('Spanner', () => { pageToken: resp1!.nextPageToken!, gaxOptions: {autoPaginate: false}, }); + + let page3size = 2; + if (!IS_EMULATOR_ENABLED && !SKIP_POSTGRESQL_BACKUP_TESTS) { + page3size = 4; + } const [page3] = await instance.getBackups({ - pageSize: 2, + pageSize: page3size, gaxOptions: {autoPaginate: false}, }); assert.strictEqual(page1.length, 1); assert.strictEqual(page2.length, 1); - assert.strictEqual(page3.length, 2); + assert.strictEqual(page3.length, page3size); assert.notStrictEqual(page2[0].formattedName_, page1[0].formattedName_); assert.ok( - page3.find(backup => backup.formattedName_ === backup1.formattedName_) + page3.find( + backup => backup.formattedName_ === googleSqlBackup1.formattedName_ + ) ); assert.ok( - page3.find(backup => backup.formattedName_ === backup2.formattedName_) + page3.find( + backup => backup.formattedName_ === googleSqlBackup2.formattedName_ + ) ); + if (!IS_EMULATOR_ENABLED && !SKIP_POSTGRESQL_BACKUP_TESTS) { + assert.ok( + page3.find( + backup => backup.formattedName_ === postgreSqlBackup1.formattedName_ + ) + ); + assert.ok( + page3.find( + backup => backup.formattedName_ === postgreSqlBackup2.formattedName_ + ) + ); + } }); - it('should restore a backup', async () => { + const restoreBackup = async (restoreDatabase, backup1, database1) => { // Perform restore to a different database. const [, restoreOperation] = await restoreDatabase.restore( backup1.formattedName_ @@ -1428,9 +1963,25 @@ describe('Spanner', () => { filter: 'metadata.@type:RestoreDatabaseMetadata', }); assert.strictEqual(restoreOperations.length, 1); + }; + + it('GOOGLE_STANDARD_SQL should restore a backup', async () => { + await restoreBackup( + googleSqlRestoreDatabase, + googleSqlBackup1, + googleSqlDatabase1 + ); + }); + + it.skip('POSTGRESQL should restore a backup', async () => { + await restoreBackup( + postgreSqlRestoreDatabase, + postgreSqlBackup1, + postgreSqlDatabase1 + ); }); - it('should not be able to restore to an existing database', async () => { + const restoreExistingDatabaseFail = async (database1, backup1) => { // Perform restore to the same database - should fail. 
try { await database1.restore(backup1.formattedName_); @@ -1439,9 +1990,23 @@ describe('Spanner', () => { // Expect to get error indicating database already exists. assert.strictEqual(err.code, grpc.status.ALREADY_EXISTS); } + }; + + it('GOOGLE_STANDARD_SQL should not be able to restore to an existing database', async () => { + await restoreExistingDatabaseFail( + googleSqlRestoreDatabase, + googleSqlBackup1 + ); + }); + + it.skip('POSTGRESQL should not be able to restore to an existing database', async () => { + await restoreExistingDatabaseFail( + postgreSqlRestoreDatabase, + postgreSqlBackup1 + ); }); - it('should update backup expiry', async () => { + const updateBackupExpiry = async backup1 => { // Update backup expiry date. const updatedBackupExpiryDate = futureDateByHours(24); await backup1.updateExpireTime(updatedBackupExpiryDate); @@ -1456,9 +2021,17 @@ describe('Spanner', () => { expiryDateFromMetadataAfterUpdate, Spanner.timestamp(updatedBackupExpiryDate) ); + }; + + it('GOOGLE_STANDARD_SQL should update backup expiry', async () => { + await updateBackupExpiry(googleSqlBackup1); + }); + + it.skip('POSTGRESQL should update backup expiry', async () => { + await updateBackupExpiry(postgreSqlBackup1); }); - it('should not update backup expiry to the past', async () => { + const pastBackupUpdateExpiryDateFail = async backup1 => { // Attempt to update expiry date to the past. const expiryDateInPast = futureDateByHours(-24); try { @@ -1470,9 +2043,17 @@ describe('Spanner', () => { // Expect to get invalid argument error indicating the expiry date. assert.strictEqual(err.code, grpc.status.INVALID_ARGUMENT); } + }; + + it('GOOGLE_STANDARD_SQL should not update backup expiry to the past', async () => { + await pastBackupUpdateExpiryDateFail(googleSqlBackup1); + }); + + it.skip('POSTGRESQL should not update backup expiry to the past', async () => { + await pastBackupUpdateExpiryDateFail(postgreSqlBackup1); }); - it('should delete backup', async () => { + const deleteBackup = async backup2 => { // Delete backup. await backup2.delete(); @@ -1484,9 +2065,17 @@ describe('Spanner', () => { } catch (err) { assert.strictEqual(err.code, grpc.status.NOT_FOUND); } + }; + + it('GOOGLE_STANDARD_SQL should delete backup', async () => { + await deleteBackup(googleSqlBackup2); }); - it('should list backup operations', async () => { + it.skip('POSTGRESQL should delete backup', async () => { + await deleteBackup(postgreSqlBackup2); + }); + + const listBackupOperations = async (backup1, database1) => { // List operations and ensure operation for current backup exists. // Without a filter. 
const [operationsWithoutFilter] = await instance.getBackupOperations();
@@ -1519,6 +2108,14 @@ describe('Spanner', () => { operationForCurrentBackupWithFilterMetadata.database, database1.formattedName_ ); + }; + + it('GOOGLE_STANDARD_SQL should list backup operations', async () => { + await listBackupOperations(googleSqlBackup1, googleSqlDatabase1); + }); + + it.skip('POSTGRESQL should list backup operations', async () => { + await listBackupOperations(postgreSqlBackup1, postgreSqlDatabase1); + }); });
@@ -1571,31 +2168,48 @@ describe('Spanner', () => { describe('Tables', () => { const TABLE_NAME = 'SingersTables'; - const table = DATABASE.table(TABLE_NAME); + const googleSqlTable = DATABASE.table(TABLE_NAME); + const postgreSqlTable = PG_DATABASE.table(TABLE_NAME); - before(() => { - return table - .create( - ` - CREATE TABLE ${TABLE_NAME} ( - SingerId STRING(1024) NOT NULL, - Name STRING(1024), - Float FLOAT64, - Int INT64, - Info BYTES(MAX), - Created TIMESTAMP, - DOB DATE, - Accents ARRAY<STRING(1024)>, - PhoneNumbers ARRAY<INT64>, - HasGear BOOL, - ) PRIMARY KEY(SingerId)`, + before(async () => { + const googleSqlCreateTable = await googleSqlTable.create( + `CREATE TABLE ${TABLE_NAME} + ( + SingerId STRING(1024) NOT NULL, + Name STRING(1024), + Float FLOAT64, + Int INT64, + Info BYTES( MAX), + Created TIMESTAMP, + DOB DATE, + Accents ARRAY<STRING(1024)>, + PhoneNumbers ARRAY<INT64>, + HasGear BOOL, + ) PRIMARY KEY(SingerId)`, + GAX_OPTIONS + ); + await onPromiseOperationComplete(googleSqlCreateTable); + + if (!IS_EMULATOR_ENABLED) { + const postgreSqlCreateTable = await postgreSqlTable.create( + `CREATE TABLE ${TABLE_NAME} + ( + "SingerId" VARCHAR(1024) NOT NULL PRIMARY KEY, + "Name" VARCHAR(1024), + "Float" DOUBLE PRECISION, + "Int" BIGINT, + "Info" BYTEA, + "Created" TIMESTAMPTZ, + "HasGear" BOOL + )`, GAX_OPTIONS - ) - .then(onPromiseOperationComplete); + ); + await onPromiseOperationComplete(postgreSqlCreateTable); + } }); - it('should throw an error for non-existant tables', done => { - const table = DATABASE.table(generateName('nope')); + const nonExistentTable = (done, database) => { + const table = database.table(generateName('nope')); table.insert( {
@@ -1606,9 +2220,20 @@ describe('Spanner', () => { done(); } ); + }; + + it('GOOGLE_STANDARD_SQL should throw an error for non-existent tables', done => { + nonExistentTable(done, DATABASE); + }); + + it('POSTGRESQL should throw an error for non-existent tables', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + nonExistentTable(done, PG_DATABASE); }); - it('should throw an error for non-existant columns', done => { + const nonExistentColumn = (done, table) => { table.insert( { SingerId: generateName('id'),
@@ -1619,9 +2244,20 @@ describe('Spanner', () => { done(); } ); + }; + + it('GOOGLE_STANDARD_SQL should throw an error for non-existent columns', done => { + nonExistentColumn(done, googleSqlTable); }); - it('should read rows as a stream', done => { + it('POSTGRESQL should throw an error for non-existent columns', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + nonExistentColumn(done, postgreSqlTable); + }); + + const readRowsStream = (done, table) => { const id = generateName('id'); const name = generateName('name');
@@ -1638,7 +2274,7 @@ describe('Spanner', () => { table .createReadStream({ keys: [id], - columns: ['SingerId', 'name'], + columns: ['SingerId', 'Name'], }) .on('error', done) .on('data', row => {
@@ -1659,9 +2295,20 @@ describe('Spanner', () => { }); } ); + }; + + it('GOOGLE_STANDARD_SQL should read rows as a stream', done => { +
readRowsStream(done, googleSqlTable); + }); + + it('POSTGRESQL should read rows as a stream', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + readRowsStream(done, postgreSqlTable); }); - it('should automatically convert to JSON', done => { + const automaticallyConvertToJson = (done, table) => { const id = generateName('id'); const name = generateName('name'); @@ -1695,9 +2342,20 @@ describe('Spanner', () => { }); } ); + }; + + it('GOOGLE_STANDARD_SQL should automatically convert to JSON', done => { + automaticallyConvertToJson(done, googleSqlTable); + }); + + it('POSTGRESQL should automatically convert to JSON', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + automaticallyConvertToJson(done, postgreSqlTable); }); - it('should automatically convert to JSON with options', done => { + const automaticallyConvertToJsonWithOptions = (done, table) => { const id = generateName('id'); table.insert( @@ -1728,9 +2386,20 @@ describe('Spanner', () => { }); } ); + }; + + it('GOOGLE_STANDARD_SQL should automatically convert to JSON with options', done => { + automaticallyConvertToJsonWithOptions(done, googleSqlTable); + }); + + it('POSTGRESQL should automatically convert to JSON with options', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + automaticallyConvertToJsonWithOptions(done, postgreSqlTable); }); - it('should insert and delete a row', done => { + const insertAndDeleteSingleRow = (done, table) => { const id = generateName('id'); const name = generateName('name'); @@ -1763,9 +2432,20 @@ describe('Spanner', () => { }); } ); + }; + + it('GOOGLE_STANDARD_SQL should insert and delete a row', done => { + insertAndDeleteSingleRow(done, googleSqlTable); + }); + + it('POSTGRESQL should insert and delete a row', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + insertAndDeleteSingleRow(done, postgreSqlTable); }); - it('should insert and delete multiple rows', done => { + const insertAndDeleteMultipleRows = (done, table) => { const id = generateName('id'); const id2 = generateName('id2'); @@ -1807,16 +2487,30 @@ describe('Spanner', () => { }); } ); + }; + + it('GOOGLE_STANDARD_SQL should insert and delete multiple rows', done => { + insertAndDeleteMultipleRows(done, googleSqlTable); + }); + + it('POSTGRESQL should insert and delete multiple rows', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + insertAndDeleteMultipleRows(done, postgreSqlTable); }); - it('should insert and delete multiple composite key rows', () => { + const insertAndDeleteMultipleCompositeKeyRows = ( + database, + createTableStatement + ) => { const id1 = 1; const name1 = generateName('name1'); const id2 = 2; const name2 = generateName('name2'); - const table = DATABASE.table('SingersComposite'); + const table = database.table('SingersComposite'); const keys = [ [id1, name1], @@ -1824,14 +2518,7 @@ describe('Spanner', () => { ] as {} as string[]; return table - .create( - ` - CREATE TABLE SingersComposite ( - SingerId INT64 NOT NULL, - Name STRING(1024), - ) PRIMARY KEY(SingerId, Name) - ` - ) + .create(createTableStatement) .then(onPromiseOperationComplete) .then(() => { return table.insert([ @@ -1868,9 +2555,35 @@ describe('Spanner', () => { const rows = data[0]; assert.strictEqual(rows.length, 0); }); + }; + + it('GOOGLE_STANDARD_SQL should insert and delete composite key rows', () => { + const createTableStatement = ` + CREATE TABLE SingersComposite ( + SingerId INT64 NOT NULL, + Name STRING(1024), + ) PRIMARY KEY(SingerId, Name) + `; + 
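// For context, once SingersComposite exists its rows are addressed by
// [SingerId, Name] tuples; a minimal sketch of a keyed read using that shape
// (the key values here are hypothetical):
//
//   const compositeKeys = [
//     [1, 'some-name-1'],
//     [2, 'some-name-2'],
//   ] as {} as string[];
//   DATABASE.table('SingersComposite').read(
//     {keys: compositeKeys, columns: ['SingerId', 'Name']},
//     (err, rows) => {
//       assert.ifError(err);
//     }
//   );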
insertAndDeleteMultipleCompositeKeyRows(DATABASE, createTableStatement); }); - it('should insert and query multiple rows', done => { + it('POSTGRESQL should insert and delete multiple composite key rows', function () { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const createTableStatement = ` + CREATE TABLE SingersComposite ( + "SingerId" BIGINT NOT NULL, + "Name" VARCHAR(1024), + PRIMARY KEY("SingerId", "Name") + )`; + insertAndDeleteMultipleCompositeKeyRows( + PG_DATABASE, + createTableStatement + ); + }); + + const insertAndQueryMultipleRows = (done, database, table, query) => { const id1 = generateName('id1'); const name1 = generateName('name'); @@ -1891,30 +2604,48 @@ describe('Spanner', () => { err => { assert.ifError(err); - DATABASE.run( - `SELECT * FROM ${TABLE_NAME} ORDER BY SingerId`, - (err, rows) => { - assert.ifError(err); + database.run(query, (err, rows) => { + assert.ifError(err); - // We just want the two most recent ones. - rows!.splice(0, rows!.length - 2); + // We just want the two most recent ones. + rows!.splice(0, rows!.length - 2); - const rowJson = rows!.map(x => x.toJSON()); + const rowJson = rows!.map(x => x.toJSON()); - assert.strictEqual(rowJson[0].SingerId, id1); - assert.strictEqual(rowJson[0].Name, name1); + assert.strictEqual(rowJson[0].SingerId, id1); + assert.strictEqual(rowJson[0].Name, name1); - assert.strictEqual(rowJson[1].SingerId, id2); - assert.strictEqual(rowJson[1].Name, name2); + assert.strictEqual(rowJson[1].SingerId, id2); + assert.strictEqual(rowJson[1].Name, name2); - done(); - } - ); + done(); + }); } ); + }; + + it('GOOGLE_STANDARD_SQL should insert and query multiple rows', done => { + insertAndQueryMultipleRows( + done, + DATABASE, + googleSqlTable, + `SELECT * FROM ${TABLE_NAME} ORDER BY SingerId` + ); + }); + + it('POSTGRESQL should insert and query multiple rows', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + insertAndQueryMultipleRows( + done, + PG_DATABASE, + postgreSqlTable, + `SELECT * FROM ${TABLE_NAME} ORDER BY "SingerId"` + ); }); - it('should insert then replace a row', done => { + const insertThenReplaceRow = (done, table) => { const originalRow = { SingerId: generateName('id'), Name: generateName('name'), @@ -1948,9 +2679,20 @@ describe('Spanner', () => { ); }); }); + }; + + it('GOOGLE_STANDARD_SQL should insert then replace a row', done => { + insertThenReplaceRow(done, googleSqlTable); + }); + + it('POSTGRESQL should insert then replace a row', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + insertThenReplaceRow(done, postgreSqlTable); }); - it('should insert then update a row', done => { + const insertThenUpdateRow = (done, table) => { const originalRow = { SingerId: generateName('id'), Name: generateName('name'), @@ -1985,6 +2727,17 @@ describe('Spanner', () => { ); }); }); + }; + + it('GOOGLE_STANDARD_SQL should insert then update a row', done => { + insertThenUpdateRow(done, googleSqlTable); + }); + + it('POSTGRESQL should insert then update a row', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + insertThenUpdateRow(done, postgreSqlTable); }); describe('insert & query', () => { @@ -1999,7 +2752,7 @@ describe('Spanner', () => { const PHONE_NUMBERS = [123123123, 234234234]; const HAS_GEAR = true; - const INSERT_ROW = { + const GOOGLE_SQL_INSERT_ROW = { SingerId: ID, Name: NAME, Float: FLOAT, @@ -2012,64 +2765,98 @@ describe('Spanner', () => { HasGear: HAS_GEAR, }; - const EXPECTED_ROW = extend(true, {}, INSERT_ROW); + const 
POSTGRESQL_INSERT_ROW = { + SingerId: ID, + Name: NAME, + Float: FLOAT, + Int: INT, + Info: INFO, + Created: CREATED, + HasGear: HAS_GEAR, + }; + + const GOOGLE_SQL_EXPECTED_ROW = extend(true, {}, GOOGLE_SQL_INSERT_ROW); + const POSTGRESQL_EXPECTED_ROW = extend(true, {}, POSTGRESQL_INSERT_ROW); before(() => { - return table.insert(INSERT_ROW); + return googleSqlTable.insert(GOOGLE_SQL_INSERT_ROW).then(() => { + postgreSqlTable.insert(POSTGRESQL_INSERT_ROW); + }); }); - it('should query in callback mode', done => { + const queryCallbackMode = (done, database, query, EXPECTED_ROW) => { const options = { strong: true, }; - DATABASE.run( - { - sql: `SELECT * FROM ${TABLE_NAME} WHERE SingerId=@id`, - params: {id: ID}, - }, - options, - (err, rows) => { - assert.ifError(err); - assert.deepStrictEqual(rows!.shift()!.toJSON(), EXPECTED_ROW); - done(); - } - ); + database.run(query, options, (err, rows) => { + assert.ifError(err); + assert.deepStrictEqual(rows!.shift()!.toJSON(), EXPECTED_ROW); + done(); + }); + }; + + it('GOOGLE_STANDARD_SQL should query in callback mode', done => { + const query = { + sql: `SELECT * FROM ${TABLE_NAME} WHERE SingerId=@id`, + params: {id: ID}, + }; + queryCallbackMode(done, DATABASE, query, GOOGLE_SQL_EXPECTED_ROW); }); - it('should query in promise mode', done => { + it('POSTGRESQL should query in callback mode', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const query = { + sql: `SELECT * FROM ${TABLE_NAME} WHERE "SingerId"=$1`, + params: {p1: ID}, + }; + queryCallbackMode(done, PG_DATABASE, query, POSTGRESQL_EXPECTED_ROW); + }); + + const queryPromiseMode = (done, database, query, EXPECTED_ROW) => { const options = { strong: true, }; - DATABASE.run( - { - sql: `SELECT * FROM ${TABLE_NAME} WHERE SingerId=@id`, - params: {id: ID}, - }, - options - ) + database + .run(query, options) .then(data => { const rows = data[0] as {} as Row[]; assert.deepStrictEqual(rows!.shift()!.toJSON(), EXPECTED_ROW); done(); }) .catch(done); + }; + + it('GOOGLE_STANDARD_SQL should query in promise mode', done => { + const query = { + sql: `SELECT * FROM ${TABLE_NAME} WHERE SingerId=@id`, + params: {id: ID}, + }; + queryPromiseMode(done, DATABASE, query, GOOGLE_SQL_EXPECTED_ROW); }); - it('should query in stream mode', done => { + it('POSTGRESQL should query in promise mode', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const query = { + sql: `SELECT * FROM ${TABLE_NAME} WHERE "SingerId"=$1`, + params: {p1: ID}, + }; + queryPromiseMode(done, PG_DATABASE, query, POSTGRESQL_EXPECTED_ROW); + }); + + const queryStreamMode = (done, database, query, EXPECTED_ROW) => { const options = { strong: true, }; let row; - const stream = DATABASE.runStream( - { - sql: `SELECT * FROM ${TABLE_NAME} WHERE SingerId=@id`, - params: {id: ID}, - }, - options - ) + const stream = database + .runStream(query, options) .on('error', done) .once('data', row_ => { row = row_; @@ -2079,19 +2866,45 @@ describe('Spanner', () => { assert.deepStrictEqual(row.toJSON(), EXPECTED_ROW); done(); }); + }; + + it('GOOGLE_STANDARD_SQL should query in stream mode', done => { + const query = { + sql: `SELECT * FROM ${TABLE_NAME} WHERE SingerId=@id`, + params: {id: ID}, + }; + queryStreamMode(done, DATABASE, query, GOOGLE_SQL_EXPECTED_ROW); }); - it('should allow "SELECT 1" queries', done => { + it('POSTGRESQL should query in stream mode', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const query = { + sql: `SELECT * FROM ${TABLE_NAME} WHERE "SingerId"=$1`, + 
params: {p1: ID}, + }; + queryStreamMode(done, PG_DATABASE, query, POSTGRESQL_EXPECTED_ROW); + }); + + it('GOOGLE_STANDARD_SQL should allow "SELECT 1" queries', done => { DATABASE.run('SELECT 1', done); }); - it('should return metadata', async () => { + it('POSTGRESQL should allow "SELECT 1" queries', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + PG_DATABASE.run('SELECT 1', done); + }); + + it('GOOGLE_STANDARD_SQL should return metadata', async () => { const [rows, , metadata] = await DATABASE.run({ sql: `SELECT * FROM ${TABLE_NAME} WHERE SingerId=@id`, params: {id: ID}, }); assert.strictEqual(rows.length, 1); - assert.deepStrictEqual(rows[0].toJSON(), EXPECTED_ROW); + assert.deepStrictEqual(rows[0].toJSON(), GOOGLE_SQL_EXPECTED_ROW); assert.ok(metadata); assert.strictEqual(metadata.rowType!.fields!.length, 10); assert.strictEqual(metadata.rowType!.fields![0].name, 'SingerId'); @@ -2106,14 +2919,46 @@ describe('Spanner', () => { assert.strictEqual(metadata.rowType!.fields![9].name, 'HasGear'); }); - it('should fail invalid queries', done => { - DATABASE.run('SELECT Apples AND Oranges', err => { + it('POSTGRESQL should return metadata', async function () { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const [rows, , metadata] = await PG_DATABASE.run({ + sql: `SELECT * FROM ${TABLE_NAME} WHERE "SingerId"=$1`, + params: {p1: ID}, + }); + assert.strictEqual(rows.length, 1); + assert.deepStrictEqual(rows[0].toJSON(), POSTGRESQL_EXPECTED_ROW); + assert.ok(metadata); + assert.strictEqual(metadata.rowType!.fields!.length, 7); + assert.strictEqual(metadata.rowType!.fields![0].name, 'SingerId'); + assert.strictEqual(metadata.rowType!.fields![1].name, 'Name'); + assert.strictEqual(metadata.rowType!.fields![2].name, 'Float'); + assert.strictEqual(metadata.rowType!.fields![3].name, 'Int'); + assert.strictEqual(metadata.rowType!.fields![4].name, 'Info'); + assert.strictEqual(metadata.rowType!.fields![5].name, 'Created'); + assert.strictEqual(metadata.rowType!.fields![6].name, 'HasGear'); + }); + + const invalidQueries = (done, database) => { + database.run('SELECT Apples AND Oranges', err => { assert.strictEqual(err!.code, 3); done(); }); + }; + + it('GOOGLE_STANDARD_SQL should fail invalid queries', done => { + invalidQueries(done, DATABASE); + }); + + it('POSTGRESQL should fail invalid queries', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + invalidQueries(done, PG_DATABASE); }); - it('should query an array of structs', done => { + it('GOOGLE_STANDARD_SQL should query an array of structs', done => { const query = ` SELECT ARRAY(SELECT AS STRUCT C1, C2 FROM (SELECT 'a' AS C1, 1 AS C2 UNION ALL SELECT 'b' AS C1, 2 AS C2) @@ -2141,7 +2986,7 @@ describe('Spanner', () => { }); }); - it('should query an empty array of structs', done => { + it('GOOGLE_STANDARD_SQL should query an empty array of structs', done => { const query = ` SELECT ARRAY(SELECT AS STRUCT * FROM (SELECT 'a', 1) WHERE 0 = 1)`; @@ -2154,22 +2999,38 @@ describe('Spanner', () => { describe('params', () => { describe('boolean', () => { - it('should bind the value', done => { + const booleanQuery = (done, database, query, value) => { + database.run(query, (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows[0][0].value, value); + done(); + }); + }; + + it('GOOGLE_STANDARD_SQL should bind the value', done => { const query = { sql: 'SELECT @v', params: { v: true, }, }; + booleanQuery(done, DATABASE, query, true); + }); - DATABASE.run(query, (err, rows) => { -
assert.ifError(err); - assert.strictEqual(rows[0][0].value, true); - done(); - }); + it('POSTGRESQL should bind the value', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const query = { + sql: 'SELECT $1', + params: { + p1: true, + }, + }; + booleanQuery(done, PG_DATABASE, query, true); }); - it('should allow for null values', done => { + it('GOOGLE_STANDARD_SQL should allow for null values', done => { const query = { sql: 'SELECT @v', params: { @@ -2179,15 +3040,26 @@ describe('Spanner', () => { v: 'bool', }, }; + booleanQuery(done, DATABASE, query, null); + }); - DATABASE.run(query, (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows[0][0].value, null); - done(); - }); + it('POSTGRESQL should allow for null values', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const query = { + sql: 'SELECT $1', + params: { + p1: null, + }, + types: { + p1: 'bool', + }, + }; + booleanQuery(done, PG_DATABASE, query, null); }); - it('should bind arrays', done => { + it('GOOGLE_STANDARD_SQL should bind arrays', done => { const values = [false, true, false]; const query = { @@ -2204,7 +3076,7 @@ describe('Spanner', () => { }); }); - it('should bind empty arrays', done => { + it('GOOGLE_STANDARD_SQL should bind empty arrays', done => { const values = []; const query: ExecuteSqlRequest = { @@ -2227,7 +3099,7 @@ describe('Spanner', () => { }); }); - it('should bind null arrays', done => { + it('GOOGLE_STANDARD_SQL should bind null arrays', done => { const query: ExecuteSqlRequest = { sql: 'SELECT @v', params: { @@ -2250,22 +3122,42 @@ describe('Spanner', () => { }); describe('int64', () => { - it('should bind the value', done => { + const int64Query = (done, database, query, value) => { + database.run(query, (err, rows) => { + assert.ifError(err); + let queriedValue = rows[0][0].value; + if (rows[0][0].value) { + queriedValue = rows[0][0].value.value; + } + assert.strictEqual(queriedValue, value); + done(); + }); + }; + + it('GOOGLE_STANDARD_SQL should bind the value', done => { const query = { sql: 'SELECT @v', params: { v: 1234, }, }; + int64Query(done, DATABASE, query, '1234'); + }); - DATABASE.run(query, (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows[0][0].value.value, '1234'); - done(); - }); + it('POSTGRESQL should bind the value', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const query = { + sql: 'SELECT $1', + params: { + p1: 1234, + }, + }; + int64Query(done, PG_DATABASE, query, '1234'); }); - it('should allow for null values', done => { + it('GOOGLE_STANDARD_SQL should allow for null values', done => { const query = { sql: 'SELECT @v', params: { @@ -2275,15 +3167,26 @@ describe('Spanner', () => { v: 'int64', }, }; + int64Query(done, DATABASE, query, null); + }); - DATABASE.run(query, (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows[0][0].value, null); - done(); - }); + it('POSTGRESQL should allow for null values', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const query = { + sql: 'SELECT $1', + params: { + p1: null, + }, + types: { + p1: 'int64', + }, + }; + int64Query(done, PG_DATABASE, query, null); }); - it('should bind arrays', done => { + it('GOOGLE_STANDARD_SQL should bind arrays', done => { const values = [1, 2, 3, null]; const query = { @@ -2308,7 +3211,7 @@ describe('Spanner', () => { }); }); - it('should bind empty arrays', done => { + it('GOOGLE_STANDARD_SQL should bind empty arrays', done => { const values = []; const query: ExecuteSqlRequest = { @@ 
-2331,7 +3234,7 @@ describe('Spanner', () => { }); }); - it('should bind null arrays', done => { + it('GOOGLE_STANDARD_SQL should bind null arrays', done => { const query: ExecuteSqlRequest = { sql: 'SELECT @v', params: { @@ -2354,22 +3257,42 @@ describe('Spanner', () => { }); describe('float64', () => { - it('should bind the value', done => { + const float64Query = (done, database, query, value) => { + database.run(query, (err, rows) => { + assert.ifError(err); + let queriedValue = rows[0][0].value; + if (rows[0][0].value) { + queriedValue = rows[0][0].value.value; + } + assert.strictEqual(queriedValue, value); + done(); + }); + }; + + it('GOOGLE_STANDARD_SQL should bind the value', done => { const query = { sql: 'SELECT @v', params: { v: 2.2, }, }; + float64Query(done, DATABASE, query, 2.2); + }); - DATABASE.run(query, (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows[0][0].value.value, 2.2); - done(); - }); + it('POSTGRESQL should bind the value', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const query = { + sql: 'SELECT $1', + params: { + p1: 2.2, + }, + }; + float64Query(done, PG_DATABASE, query, 2.2); }); - it('should allow for null values', done => { + it('GOOGLE_STANDARD_SQL should allow for null values', done => { const query = { sql: 'SELECT @v', params: { @@ -2379,15 +3302,26 @@ describe('Spanner', () => { v: 'float64', }, }; + float64Query(done, DATABASE, query, null); + }); - DATABASE.run(query, (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows[0][0].value, null); - done(); - }); + it('POSTGRESQL should allow for null values', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const query = { + sql: 'SELECT $1', + params: { + p1: null, + }, + types: { + p1: 'float64', + }, + }; + float64Query(done, PG_DATABASE, query, null); }); - it('should bind arrays', done => { + it('GOOGLE_STANDARD_SQL should bind arrays', done => { const values = [null, 1.1, 2.3, 3.5, null]; const query = { @@ -2412,7 +3346,7 @@ describe('Spanner', () => { }); }); - it('should bind empty arrays', done => { + it('GOOGLE_STANDARD_SQL should bind empty arrays', done => { const values = []; const query: ExecuteSqlRequest = { @@ -2435,7 +3369,7 @@ describe('Spanner', () => { }); }); - it('should bind null arrays', done => { + it('GOOGLE_STANDARD_SQL should bind null arrays', done => { const query: ExecuteSqlRequest = { sql: 'SELECT @v', params: { @@ -2456,52 +3390,76 @@ describe('Spanner', () => { }); }); - it('should bind Infinity', done => { + it('GOOGLE_STANDARD_SQL should bind Infinity', done => { const query = { sql: 'SELECT @v', params: { v: Infinity, }, }; + float64Query(done, DATABASE, query, 'Infinity'); + }); - DATABASE.run(query, (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows[0][0].value.value, 'Infinity'); - done(); - }); + it('POSTGRESQL should bind Infinity', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const query = { + sql: 'SELECT $1', + params: { + p1: Infinity, + }, + }; + float64Query(done, PG_DATABASE, query, 'Infinity'); }); - it('should bind -Infinity', done => { + it('GOOGLE_STANDARD_SQL should bind -Infinity', done => { const query = { sql: 'SELECT @v', params: { v: -Infinity, }, }; + float64Query(done, DATABASE, query, '-Infinity'); + }); - DATABASE.run(query, (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows[0][0].value.value, '-Infinity'); - done(); - }); + it('POSTGRESQL should bind -Infinity', function (done) { + if (IS_EMULATOR_ENABLED) { + 
this.skip(); + } + const query = { + sql: 'SELECT $1', + params: { + p1: -Infinity, + }, + }; + float64Query(done, PG_DATABASE, query, '-Infinity'); }); - it('should bind NaN', done => { + it('GOOGLE_STANDARD_SQL should bind NaN', done => { const query = { sql: 'SELECT @v', params: { v: NaN, }, }; + float64Query(done, DATABASE, query, 'NaN'); + }); - DATABASE.run(query, (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows[0][0].value.value, 'NaN'); - done(); - }); + it('POSTGRESQL should bind NaN', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const query = { + sql: 'SELECT $1', + params: { + p1: NaN, + }, + }; + float64Query(done, PG_DATABASE, query, 'NaN'); }); - it('should bind an array of Infinity and NaN', done => { + it('GOOGLE_STANDARD_SQL should bind an array of Infinity and NaN', done => { const values = [Infinity, -Infinity, NaN]; const query = { @@ -2528,22 +3486,38 @@ describe('Spanner', () => { }); describe('string', () => { - it('should bind the value', done => { + const stringQuery = (done, database, query, value) => { + database.run(query, (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows[0][0].value, value); + done(); + }); + }; + + it('GOOGLE_STANDARD_SQL should bind the value', done => { const query = { sql: 'SELECT @v', params: { v: 'abc', }, }; + stringQuery(done, DATABASE, query, 'abc'); + }); - DATABASE.run(query, (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows[0][0].value, 'abc'); - done(); - }); + it('POSTGRESQL should bind the value', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const query = { + sql: 'SELECT $1', + params: { + p1: 'abc', + }, + }; + stringQuery(done, PG_DATABASE, query, 'abc'); }); - it('should allow for null values', done => { + it('GOOGLE_STANDARD_SQL should allow for null values', done => { const query = { sql: 'SELECT @v', params: { @@ -2553,15 +3527,26 @@ describe('Spanner', () => { v: 'string', }, }; + stringQuery(done, DATABASE, query, null); + }); - DATABASE.run(query, (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows[0][0].value, null); - done(); - }); + it('POSTGRESQL should allow for null values', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const query = { + sql: 'SELECT $1', + params: { + p1: null, + }, + types: { + p1: 'string', + }, + }; + stringQuery(done, PG_DATABASE, query, null); }); - it('should bind arrays', done => { + it('GOOGLE_STANDARD_SQL should bind arrays', done => { const values = ['a', 'b', 'c', null]; const query = { @@ -2578,7 +3563,7 @@ describe('Spanner', () => { }); }); - it('should bind empty arrays', done => { + it('GOOGLE_STANDARD_SQL should bind empty arrays', done => { const values = []; const query: ExecuteSqlRequest = { @@ -2601,7 +3586,7 @@ describe('Spanner', () => { }); }); - it('should bind null arrays', done => { + it('GOOGLE_STANDARD_SQL should bind null arrays', done => { const query: ExecuteSqlRequest = { sql: 'SELECT @v', params: { @@ -2624,24 +3609,38 @@ describe('Spanner', () => { }); describe('bytes', () => { - it('should bind the value', done => { - const buffer = Buffer.from('abc'); + const bytesQuery = (done, database, query, value) => { + database.run(query, (err, rows) => { + assert.ifError(err); + assert.deepStrictEqual(rows[0][0].value, value); + done(); + }); + }; + it('GOOGLE_STANDARD_SQL should bind the value', done => { const query = { sql: 'SELECT @v', params: { - v: buffer, + v: Buffer.from('abc'), }, }; + bytesQuery(done, DATABASE, query, 
Buffer.from('abc')); + }); - DATABASE.run(query, (err, rows) => { - assert.ifError(err); - assert.deepStrictEqual(rows[0][0].value, buffer); - done(); - }); + it('POSTGRESQL should bind the value', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const query = { + sql: 'SELECT $1', + params: { + p1: Buffer.from('abc'), + }, + }; + bytesQuery(done, PG_DATABASE, query, Buffer.from('abc')); }); - it('should allow for null values', done => { + it('GOOGLE_STANDARD_SQL should allow for null values', done => { const query = { sql: 'SELECT @v', params: { @@ -2651,15 +3650,26 @@ describe('Spanner', () => { v: 'bytes', }, }; + bytesQuery(done, DATABASE, query, null); + }); - DATABASE.run(query, (err, rows) => { - assert.ifError(err); - assert.deepStrictEqual(rows[0][0].value, null); - done(); - }); + it('POSTGRESQL should allow for null values', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const query = { + sql: 'SELECT $1', + params: { + p1: null, + }, + types: { + p1: 'bytes', + }, + }; + bytesQuery(done, PG_DATABASE, query, null); }); - it('should bind arrays', done => { + it('GOOGLE_STANDARD_SQL should bind arrays', done => { const values = [Buffer.from('a'), Buffer.from('b'), null]; const query = { @@ -2676,7 +3686,7 @@ describe('Spanner', () => { }); }); - it('should bind empty arrays', done => { + it('GOOGLE_STANDARD_SQL should bind empty arrays', done => { const values = []; const query: ExecuteSqlRequest = { @@ -2699,7 +3709,7 @@ describe('Spanner', () => { }); }); - it('should bind null arrays', done => { + it('GOOGLE_STANDARD_SQL should bind null arrays', done => { const query: ExecuteSqlRequest = { sql: 'SELECT @v', params: { @@ -2722,24 +3732,40 @@ describe('Spanner', () => { }); describe('timestamp', () => { - it('should bind the value', done => { - const timestamp = Spanner.timestamp(); + const timestampQuery = (done, database, query, value) => { + database.run(query, (err, rows) => { + assert.ifError(err); + assert.deepStrictEqual(rows[0][0].value, value); + done(); + }); + }; + it('GOOGLE_STANDARD_SQL should bind the value', done => { + const timestamp = Spanner.timestamp(); const query = { sql: 'SELECT @v', params: { v: timestamp, }, }; + timestampQuery(done, DATABASE, query, timestamp); + }); - DATABASE.run(query, (err, rows) => { - assert.ifError(err); - assert.deepStrictEqual(rows[0][0].value, timestamp); - done(); - }); + it('POSTGRESQL should bind the value', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const timestamp = Spanner.timestamp(); + const query = { + sql: 'SELECT $1', + params: { + p1: timestamp, + }, + }; + timestampQuery(done, PG_DATABASE, query, timestamp); }); - it('should allow for null values', done => { + it('GOOGLE_STANDARD_SQL should allow for null values', done => { const query = { sql: 'SELECT @v', params: { @@ -2749,15 +3775,26 @@ describe('Spanner', () => { v: 'timestamp', }, }; + timestampQuery(done, DATABASE, query, null); + }); - DATABASE.run(query, (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows[0][0].value, null); - done(); - }); + it('POSTGRESQL should allow for null values', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const query = { + sql: 'SELECT $1', + params: { + p1: null, + }, + types: { + p1: 'timestamp', + }, + }; + timestampQuery(done, PG_DATABASE, query, null); }); - it('should bind arrays', done => { + it('GOOGLE_STANDARD_SQL should bind arrays', done => { const values = [ Spanner.timestamp(), Spanner.timestamp('3-3-1999'), @@ 
-2778,7 +3815,7 @@ describe('Spanner', () => { }); }); - it('should bind empty arrays', done => { + it('GOOGLE_STANDARD_SQL should bind empty arrays', done => { const values = []; const query: ExecuteSqlRequest = { @@ -2801,7 +3838,7 @@ describe('Spanner', () => { }); }); - it('should bind null arrays', done => { + it('GOOGLE_STANDARD_SQL should bind null arrays', done => { const query: ExecuteSqlRequest = { sql: 'SELECT @v', params: { @@ -2824,7 +3861,7 @@ describe('Spanner', () => { }); describe('date', () => { - it('should bind the value', done => { + it('GOOGLE_STANDARD_SQL should bind the value', done => { const date = Spanner.date(); const query = { @@ -2844,7 +3881,7 @@ describe('Spanner', () => { }); }); - it('should allow for null values', done => { + it('GOOGLE_STANDARD_SQL should allow for null values', done => { const query = { sql: 'SELECT @v', params: { @@ -2862,7 +3899,7 @@ describe('Spanner', () => { }); }); - it('should bind arrays', done => { + it('GOOGLE_STANDARD_SQL should bind arrays', done => { const values = [Spanner.date(), Spanner.date('3-3-1999'), null]; const query = { @@ -2884,7 +3921,7 @@ describe('Spanner', () => { }); }); - it('should bind empty arrays', done => { + it('GOOGLE_STANDARD_SQL should bind empty arrays', done => { const values = []; const query: ExecuteSqlRequest = { @@ -2907,7 +3944,7 @@ describe('Spanner', () => { }); }); - it('should bind null arrays', done => { + it('GOOGLE_STANDARD_SQL should bind null arrays', done => { const query: ExecuteSqlRequest = { sql: 'SELECT @v', params: { @@ -2930,7 +3967,7 @@ describe('Spanner', () => { }); describe('structs', () => { - it('should bind a simple struct', done => { + it('GOOGLE_STANDARD_SQL should bind a simple struct', done => { const query = { sql: 'SELECT @structParam.userf, @p4', params: { @@ -2952,7 +3989,7 @@ describe('Spanner', () => { }); }); - it('should bind null structs', done => { + it('GOOGLE_STANDARD_SQL should bind null structs', done => { const query: ExecuteSqlRequest = { sql: 'SELECT @structParam.userf is NULL', params: { @@ -2985,7 +4022,7 @@ describe('Spanner', () => { }); }); - it('should bind nested structs', done => { + it('GOOGLE_STANDARD_SQL should bind nested structs', done => { const query = { sql: 'SELECT @structParam.structf.nestedf', params: { @@ -3007,7 +4044,7 @@ describe('Spanner', () => { }); }); - it('should bind null nested structs', done => { + it('GOOGLE_STANDARD_SQL should bind null nested structs', done => { const query: ExecuteSqlRequest = { sql: 'SELECT @structParam.structf.nestedf', params: { @@ -3042,7 +4079,7 @@ describe('Spanner', () => { }); }); - it('should bind empty structs', done => { + it('GOOGLE_STANDARD_SQL should bind empty structs', done => { const query = { sql: 'SELECT @structParam IS NULL', params: { @@ -3060,7 +4097,7 @@ describe('Spanner', () => { }); }); - it('should bind null structs with no fields', done => { + it('GOOGLE_STANDARD_SQL should bind null structs with no fields', done => { const query = { sql: 'SELECT @structParam IS NULL', params: { @@ -3081,7 +4118,7 @@ describe('Spanner', () => { }); }); - it('should bind structs with null fields', done => { + it('GOOGLE_STANDARD_SQL should bind structs with null fields', done => { const query: ExecuteSqlRequest = { sql: 'SELECT @structParam.f1', params: { @@ -3112,7 +4149,7 @@ describe('Spanner', () => { }); }); - it('should bind structs with duplicate fields', done => { + it('GOOGLE_STANDARD_SQL should bind structs with duplicate fields', done => { const query = { sql: 'SELECT 
@structParam=STRUCT(10, 11)', params: { @@ -3139,7 +4176,7 @@ describe('Spanner', () => { }); }); - it('should bind structs with missing field names', done => { + it('GOOGLE_STANDARD_SQL should bind structs with missing field names', done => { const query = { sql: 'SELECT @structParam=STRUCT(5)', params: { @@ -3157,7 +4194,7 @@ describe('Spanner', () => { }); }); - it('should allow equality checks', done => { + it('GOOGLE_STANDARD_SQL should allow equality checks', done => { const query = { sql: 'SELECT @structParam=STRUCT(1, "bob")', params: { @@ -3178,7 +4215,7 @@ describe('Spanner', () => { }); }); - it('should allow nullness checks', done => { + it('GOOGLE_STANDARD_SQL should allow nullness checks', done => { const query = { sql: 'SELECT @structParam IS NULL', params: { @@ -3199,7 +4236,7 @@ describe('Spanner', () => { }); }); - it('should allow an array of non-null structs', done => { + it('GOOGLE_STANDARD_SQL should allow an array of non-null structs', done => { const query = { sql: 'SELECT a.threadid FROM UNNEST(@arraysf) a', params: { @@ -3227,7 +4264,7 @@ describe('Spanner', () => { }); }); - it('should allow an array of structs with null fields', done => { + it('GOOGLE_STANDARD_SQL should allow an array of structs with null fields', done => { const query: ExecuteSqlRequest = { sql: 'SELECT a.threadid FROM UNNEST(@structParam.arraysf) a', params: { @@ -3270,7 +4307,7 @@ describe('Spanner', () => { }); }); - it('should allow a null array of structs', done => { + it('GOOGLE_STANDARD_SQL should allow a null array of structs', done => { const query: ExecuteSqlRequest = { sql: 'SELECT a.threadid FROM UNNEST(@structParamArray) a', params: { @@ -3303,9 +4340,10 @@ describe('Spanner', () => { describe('large reads', () => { const TABLE_NAME = 'LargeReads'; - const table = DATABASE.table(TABLE_NAME); + const googleSqlTable = DATABASE.table(TABLE_NAME); + const postgreSqlTable = PG_DATABASE.table(TABLE_NAME); - const expectedRow = { + const googleSqlExpectedRow = { Key: generateName('key'), StringValue: string(), StringArray: [string(), string(), string(), string()], @@ -3313,6 +4351,12 @@ describe('Spanner', () => { BytesArray: [bytes(), bytes(), bytes(), bytes()], }; + const postgreSqlExpectedRow = { + Key: generateName('key'), + StringValue: string(), + BytesValue: bytes(), + }; + function string() { const offset = Math.floor(Math.random() * 500); @@ -3331,28 +4375,39 @@ describe('Spanner', () => { return Buffer.from(bytes, 'base64'); } - before(() => { - return table - .create( - ` + before(async () => { + const googleSqlCreateTable = await googleSqlTable.create( + ` CREATE TABLE ${TABLE_NAME} ( - Key STRING(MAX) NOT NULL, - StringValue STRING(MAX), - StringArray ARRAY, - BytesValue BYTES(MAX), - BytesArray ARRAY - ) PRIMARY KEY (Key)` - ) - .then(onPromiseOperationComplete) - .then(() => { - return table.insert(expectedRow); - }); + Key STRING(MAX) NOT NULL, + StringValue STRING(MAX), + StringArray ARRAY, + BytesValue BYTES(MAX), + BytesArray ARRAY + ) PRIMARY KEY (Key)`, + GAX_OPTIONS + ); + await onPromiseOperationComplete(googleSqlCreateTable); + await googleSqlTable.insert(googleSqlExpectedRow); + + if (!IS_EMULATOR_ENABLED) { + const postgreSqlCreateTable = await postgreSqlTable.create( + `CREATE TABLE ${TABLE_NAME} ( + "Key" VARCHAR NOT NULL PRIMARY KEY, + "StringValue" VARCHAR, + "BytesValue" BYTEA + )`, + GAX_OPTIONS + ); + await onPromiseOperationComplete(postgreSqlCreateTable); + await postgreSqlTable.insert(postgreSqlExpectedRow); + } }); - it('should read large datasets', 
done => { - table.read( + it('GOOGLE_STANDARD_SQL should read large datasets', done => { + googleSqlTable.read( { - keys: [expectedRow.Key], + keys: [googleSqlExpectedRow.Key], columns: [ 'Key', 'StringValue', @@ -3366,26 +4421,69 @@ describe('Spanner', () => { const row = rows![0].toJSON(); - assert.strictEqual(row.Key, expectedRow.Key); - assert.strictEqual(row.StringValue, expectedRow.StringValue); - assert.deepStrictEqual(row.StringArray, expectedRow.StringArray); + assert.strictEqual(row.Key, googleSqlExpectedRow.Key); + assert.strictEqual( + row.StringValue, + googleSqlExpectedRow.StringValue + ); + assert.deepStrictEqual( + row.StringArray, + googleSqlExpectedRow.StringArray + ); row.BytesValue = base64ToBuffer(row.BytesValue); row.BytesArray = row.BytesArray.map(base64ToBuffer); - assert.deepStrictEqual(row.BytesValue, expectedRow.BytesValue); - assert.deepStrictEqual(row.BytesArray, expectedRow.BytesArray); + assert.deepStrictEqual( + row.BytesValue, + googleSqlExpectedRow.BytesValue + ); + assert.deepStrictEqual( + row.BytesArray, + googleSqlExpectedRow.BytesArray + ); + + done(); + } + ); + }); + + it('POSTGRESQL should read large datasets', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + postgreSqlTable.read( + { + keys: [postgreSqlExpectedRow.Key], + columns: ['Key', 'StringValue', 'BytesValue'], + }, + (err, rows) => { + assert.ifError(err); + + const row = rows![0].toJSON(); + + assert.strictEqual(row.Key, postgreSqlExpectedRow.Key); + assert.strictEqual( + row.StringValue, + postgreSqlExpectedRow.StringValue + ); + + row.BytesValue = base64ToBuffer(row.BytesValue); + assert.deepStrictEqual( + row.BytesValue, + postgreSqlExpectedRow.BytesValue + ); done(); } ); }); - it('should query large datasets', done => { + it('GOOGLE_STANDARD_SQL should query large datasets', done => { const query = { - sql: 'SELECT * FROM ' + table.name + ' WHERE Key = @key', + sql: 'SELECT * FROM ' + googleSqlTable.name + ' WHERE Key = @key', params: { - key: expectedRow.Key, + key: googleSqlExpectedRow.Key, }, }; @@ -3394,15 +4492,59 @@ describe('Spanner', () => { const row = rows[0].toJSON(); - assert.strictEqual(row.Key, expectedRow.Key); - assert.strictEqual(row.StringValue, expectedRow.StringValue); - assert.deepStrictEqual(row.StringArray, expectedRow.StringArray); + assert.strictEqual(row.Key, googleSqlExpectedRow.Key); + assert.strictEqual( + row.StringValue, + googleSqlExpectedRow.StringValue + ); + assert.deepStrictEqual( + row.StringArray, + googleSqlExpectedRow.StringArray + ); row.BytesValue = base64ToBuffer(row.BytesValue); row.BytesArray = row.BytesArray.map(base64ToBuffer); - assert.deepStrictEqual(row.BytesValue, expectedRow.BytesValue); - assert.deepStrictEqual(row.BytesArray, expectedRow.BytesArray); + assert.deepStrictEqual( + row.BytesValue, + googleSqlExpectedRow.BytesValue + ); + assert.deepStrictEqual( + row.BytesArray, + googleSqlExpectedRow.BytesArray + ); + + done(); + }); + }); + + it('POSTGRESQL should query large datasets', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const query = { + sql: 'SELECT * FROM ' + postgreSqlTable.name + ' WHERE "Key" = $1', + params: { + p1: postgreSqlExpectedRow.Key, + }, + }; + + PG_DATABASE.run(query, (err, rows) => { + assert.ifError(err); + + const row = rows[0].toJSON(); + + assert.strictEqual(row.Key, postgreSqlExpectedRow.Key); + assert.strictEqual( + row.StringValue, + postgreSqlExpectedRow.StringValue + ); + + row.BytesValue = base64ToBuffer(row.BytesValue); + assert.deepStrictEqual( + 
row.BytesValue, + postgreSqlExpectedRow.BytesValue + ); done(); }); @@ -3416,7 +4558,7 @@ describe('Spanner', () => { Name: generateName('name'), }; - it('should update a row', done => { + const updateRow = (done, table) => { const row = { SingerId: ROW.SingerId, Name: generateName('name'), @@ -3441,9 +4583,20 @@ describe('Spanner', () => { ); }); }); + }; + + it('GOOGLE_STANDARD_SQL should update a row', done => { + updateRow(done, googleSqlTable); + }); + + it('POSTGRESQL should update a row', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + updateRow(done, postgreSqlTable); }); - it('should insert a row', done => { + const insertRow = (done, table) => { table.upsert(ROW, err => { assert.ifError(err); @@ -3459,43 +4612,66 @@ describe('Spanner', () => { } ); }); + }; + + it('GOOGLE_STANDARD_SQL should insert a row', done => { + insertRow(done, googleSqlTable); + }); + + it('POSTGRESQL should insert a row', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + insertRow(done, postgreSqlTable); }); }); describe('read', () => { const TABLE_NAME = 'ReadTestTable'; - const table = DATABASE.table('ReadTestTable'); + const googleSqlTable = DATABASE.table(TABLE_NAME); + const postgreSqlTable = PG_DATABASE.table(TABLE_NAME); const ALL_COLUMNS = ['Key', 'StringValue']; - before(() => { - return table - .create( - ` + before(async () => { + const googleSqlCreateTable = await googleSqlTable.create( + ` CREATE TABLE ${TABLE_NAME} ( Key STRING(MAX) NOT NULL, StringValue STRING(MAX) ) PRIMARY KEY (Key)`, - GAX_OPTIONS - ) - .then(onPromiseOperationComplete) - .then(() => { - return DATABASE.updateSchema(` + GAX_OPTIONS + ); + await onPromiseOperationComplete(googleSqlCreateTable); + const googleSqlCreateIndex = await DATABASE.updateSchema(` CREATE INDEX ReadByValue ON ${TABLE_NAME}(StringValue)`); - }) - .then(onPromiseOperationComplete) - .then(() => { - const data: Array<{}> = []; + await onPromiseOperationComplete(googleSqlCreateIndex); - for (let i = 0; i < 15; ++i) { - data.push({ - Key: 'k' + i, - StringValue: 'v' + i, - }); - } + const data: Array<{}> = []; - return table.insert(data); + for (let i = 0; i < 15; ++i) { + data.push({ + Key: 'k' + i, + StringValue: 'v' + i, }); + } + await googleSqlTable.insert(data); + + if (!IS_EMULATOR_ENABLED) { + const postgreSqlCreateTable = await postgreSqlTable.create( + ` + CREATE TABLE ${TABLE_NAME} ( + "Key" VARCHAR NOT NULL PRIMARY KEY, + "StringValue" VARCHAR + )`, + GAX_OPTIONS + ); + await onPromiseOperationComplete(postgreSqlCreateTable); + const postgreSqlCreateIndex = await PG_DATABASE.updateSchema(` + CREATE INDEX ReadByValue ON ${TABLE_NAME}("StringValue")`); + await onPromiseOperationComplete(postgreSqlCreateIndex); + await postgreSqlTable.insert(data); + } }); // all of these tests require testing with and without an index, @@ -3715,15 +4891,25 @@ describe('Spanner', () => { }, ].forEach(test => { // test normally - it(test.test, done => { - table.read(test.query as ReadRequest, (err, rows) => { + it(`GOOGLE_STANDARD_SQL ${test.test}`, done => { + googleSqlTable.read(test.query as ReadRequest, (err, rows) => { + test.assertions(err, rows); + done(); + }); + }); + + it(`POSTGRESQL ${test.test}`, function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + postgreSqlTable.read(test.query as ReadRequest, (err, rows) => { test.assertions(err, rows); done(); }); }); // test using an index - it(test.test + ' with an index', done => { + const readUsingIndex = (done, test, table) => { + const query = extend( { 
index: 'ReadByValue', @@ -3754,6 +4940,17 @@ describe('Spanner', () => { test.assertions(err, rows); done(); }); + }; + + it(`GOOGLE_STANDARD_SQL ${test.test}` + ' with an index', done => { + readUsingIndex(done, test, googleSqlTable); + }); + + it(`POSTGRESQL ${test.test}` + ' with an index', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + readUsingIndex(done, test, postgreSqlTable); }); }); @@ -3772,8 +4969,8 @@ describe('Spanner', () => { }); }); - it('should read over invalid table fails', done => { - const table = DATABASE.table('ReadTestTablezzz'); + const readInvalidTable = (done, database) => { + const table = database.table('ReadTestTablezzz'); const query = { keys: ['k1'], @@ -3784,9 +4981,20 @@ describe('Spanner', () => { assert.strictEqual(err.code, 5); done(); }); + }; + + it('GOOGLE_STANDARD_SQL should read over invalid table fails', done => { + readInvalidTable(done, DATABASE); + }); + + it('POSTGRESQL should read over invalid table fails', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + readInvalidTable(done, PG_DATABASE); }); - it('should read over invalid column fails', done => { + const readInvalidColumn = (done, table) => { const query = { keys: ['k1'], columns: ['ohnoes'], @@ -3796,14 +5004,20 @@ describe('Spanner', () => { assert.strictEqual(err.code, 5); done(); }); + }; + + it('GOOGLE_STANDARD_SQL should read over invalid column fails', done => { + readInvalidColumn(done, googleSqlTable); }); - it('should fail if deadline exceeds', function (done) { - // This test case somehow causes the emulator to return a RESOURCE_EXHAUSTED - // error for this or following gRPC calls. + it('POSTGRESQL should read over invalid column fails', function (done) { if (IS_EMULATOR_ENABLED) { this.skip(); } + readInvalidColumn(done, postgreSqlTable); + }); + + const failDeadlineExceed = (done, table) => { const query = { keys: ['k1'], columns: ALL_COLUMNS, @@ -3816,6 +5030,22 @@ describe('Spanner', () => { assert.strictEqual(err.code, 4); done(); }); + }; + + it('GOOGLE_STANDARD_SQL should fail if deadline exceeds', function (done) { + // This test case somehow causes the emulator to return a RESOURCE_EXHAUSTED + // error for this or following gRPC calls. 
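// For context, the DEADLINE_EXCEEDED below (gRPC code 4) is provoked by a very
// small per-call deadline; a rough sketch, assuming the read request accepts a
// gaxOptions.timeout (the exact options used by failDeadlineExceed are not
// shown in this hunk, and the value here is illustrative):
//
//   googleSqlTable.read(
//     {keys: ['k1'], columns: ALL_COLUMNS, gaxOptions: {timeout: 1}},
//     err => {
//       assert.strictEqual(err!.code, grpc.status.DEADLINE_EXCEEDED);
//     }
//   );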
+ if (IS_EMULATOR_ENABLED) { + this.skip(); + } + failDeadlineExceed(done, googleSqlTable); + }); + + it('POSTGRESQL should fail if deadline exceeds', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + failDeadlineExceed(done, postgreSqlTable); }); }); }); @@ -3978,43 +5208,62 @@ describe('Spanner', () => { describe('Transactions', () => { const TABLE_NAME = 'TxnTable'; - const table = DATABASE.table(TABLE_NAME); + const googleSqlTable = DATABASE.table(TABLE_NAME); + const postgreSqlTable = PG_DATABASE.table(TABLE_NAME); - const schema = ` + const googleSqlSchema = ` CREATE TABLE ${TABLE_NAME} ( Key STRING(MAX) NOT NULL, StringValue STRING(MAX), NumberValue INT64 ) PRIMARY KEY (Key) `; + const postgreSqlSchema = ` + CREATE TABLE ${TABLE_NAME} ( + "Key" VARCHAR NOT NULL PRIMARY KEY, + "StringValue" VARCHAR , + "NumberValue" BIGINT + )`; // eslint-disable-next-line @typescript-eslint/no-explicit-any - const records: any[] = []; + const googleSqlRecords = []; + const postgreSqlRecords = []; before(async () => { - await onPromiseOperationComplete(await table.create(schema, GAX_OPTIONS)); - - for (let i = 0; i < 5; i++) { - const entry = {Key: `k${i}`, StringValue: `v${i}`}; + const insertRecords = async function (table, records) { + for (let i = 0; i < 5; i++) { + const entry = {Key: `k${i}`, StringValue: `v${i}`}; + + const [{commitTimestamp}] = await table.insert(entry); + const record = Object.assign(entry, { + commitTimestamp, + localTimestamp: Date.now(), + }); - const [{commitTimestamp}] = await table.insert(entry); - const record = Object.assign(entry, { - commitTimestamp, - localTimestamp: Date.now(), - }); + records.push(record); + await wait(1000); + } + }; + await onPromiseOperationComplete( + await googleSqlTable.create(googleSqlSchema, GAX_OPTIONS) + ); + await insertRecords(googleSqlTable, googleSqlRecords); - records.push(record); - await wait(1000); + if (!IS_EMULATOR_ENABLED) { + await onPromiseOperationComplete( + await postgreSqlTable.create(postgreSqlSchema, GAX_OPTIONS) + ); + await insertRecords(postgreSqlTable, postgreSqlRecords); } }); describe('snapshots', () => { - it('should run a read only transaction', done => { + const readOnlyTransaction = (done, database, records) => { const options = { strong: true, }; - DATABASE.getSnapshot(options, (err, transaction) => { + database.getSnapshot(options, (err, transaction) => { assert.ifError(err); transaction!.run(`SELECT * FROM ${TABLE_NAME}`, (err, rows) => { @@ -4025,10 +5274,21 @@ describe('Spanner', () => { done(); }); }); + }; + + it('GOOGLE_STANDARD_SQL should run a read only transaction', done => { + readOnlyTransaction(done, DATABASE, googleSqlRecords); + }); + + it('POSTGRESQL should run a read only transaction', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + readOnlyTransaction(done, PG_DATABASE, postgreSqlRecords); }); - it('should read keys from a table', done => { - DATABASE.getSnapshot((err, transaction) => { + const readKeysFromTable = (done, database, table, records) => { + database.getSnapshot((err, transaction) => { assert.ifError(err); const query = { @@ -4049,14 +5309,30 @@ describe('Spanner', () => { done(); }); }); + }; + + it('GOOGLE_STANDARD_SQL should read keys from a table', done => { + readKeysFromTable(done, DATABASE, googleSqlTable, googleSqlRecords); + }); + + it('POSTGRESQL should read keys from a table', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + readKeysFromTable( + done, + PG_DATABASE, + postgreSqlTable, + postgreSqlRecords + ); 
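// A short sketch of the snapshot read these helpers exercise, using a key
// range instead of explicit keys; the bounds and columns are illustrative
// assumptions, not the elided query above:
//
//   PG_DATABASE.getSnapshot((err, transaction) => {
//     assert.ifError(err);
//     transaction!.read(
//       TABLE_NAME,
//       {ranges: [{startClosed: 'k0', endOpen: 'k3'}], columns: ['Key']},
//       (err, rows) => {
//         assert.ifError(err);
//         transaction!.end();
//       }
//     );
//   });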
}); - it('should accept a read timestamp', done => { + const acceptReadTimestamp = (done, database, records) => { const options = { readTimestamp: records[0].commitTimestamp, }; - DATABASE.getSnapshot(options, (err, transaction) => { + database.getSnapshot(options, (err, transaction) => { assert.ifError(err); transaction!.run(`SELECT * FROM ${TABLE_NAME}`, (err, rows) => { @@ -4073,9 +5349,20 @@ describe('Spanner', () => { done(); }); }); + }; + + it('GOOGLE_STANDARD_SQL should accept a read timestamp', done => { + acceptReadTimestamp(done, DATABASE, googleSqlRecords); + }); + + it('POSTGRESQL should accept a read timestamp', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + acceptReadTimestamp(done, PG_DATABASE, postgreSqlRecords); }); - it('should accept a min timestamp', done => { + const acceptMinTimestamp = (done, database, records) => { const query = 'SELECT * FROM ' + TABLE_NAME; const options = { @@ -4084,42 +5371,71 @@ describe('Spanner', () => { // minTimestamp can only be used in single use transactions // so we can't use database.getSnapshot here - DATABASE.run(query, options, (err, rows) => { + database.run(query, options, (err, rows) => { assert.ifError(err); assert.strictEqual(rows!.length, records.length); done(); }); + }; + + it('GOOGLE_STANDARD_SQL should accept a min timestamp', done => { + acceptMinTimestamp(done, DATABASE, googleSqlRecords); + }); + + it('POSTGRESQL should accept a min timestamp', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + acceptMinTimestamp(done, PG_DATABASE, postgreSqlRecords); }); - it('should accept an exact staleness', done => { + const acceptExactStaleness = (done, database, records, query) => { const options = { exactStaleness: Date.now() - records[1].localTimestamp, }; - DATABASE.getSnapshot(options, (err, transaction) => { + database.getSnapshot(options, (err, transaction) => { assert.ifError(err); - transaction!.run( - 'SELECT * FROM ' + TABLE_NAME + ' ORDER BY Key', - (err, rows) => { - assert.ifError(err); - assert.strictEqual(rows.length, 2); + transaction!.run(query, (err, rows) => { + assert.ifError(err); + assert.strictEqual(rows.length, 2); - const rowJson = rows.map(x => x.toJSON()); + const rowJson = rows.map(x => x.toJSON()); - assert.strictEqual(rowJson[0].Key, 'k0'); - assert.strictEqual(rowJson[0].StringValue, 'v0'); - assert.strictEqual(rowJson[1].Key, 'k1'); - assert.strictEqual(rowJson[1].StringValue, 'v1'); + assert.strictEqual(rowJson[0].Key, 'k0'); + assert.strictEqual(rowJson[0].StringValue, 'v0'); + assert.strictEqual(rowJson[1].Key, 'k1'); + assert.strictEqual(rowJson[1].StringValue, 'v1'); - transaction!.end(); - done(); - } - ); + transaction!.end(); + done(); + }); }); + }; + + it('GOOGLE_STANDARD_SQL should accept an exact staleness', done => { + acceptExactStaleness( + done, + DATABASE, + googleSqlRecords, + `SELECT * FROM ${TABLE_NAME} ORDER BY Key` + ); }); - it('should accept a max staleness', done => { + it('POSTGRESQL should accept an exact staleness', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + acceptExactStaleness( + done, + PG_DATABASE, + postgreSqlRecords, + `SELECT * FROM ${TABLE_NAME} ORDER BY "Key"` + ); + }); + + const acceptMaxStaleness = (done, database, records) => { const query = 'SELECT * FROM TxnTable'; const options = { @@ -4128,19 +5444,35 @@ describe('Spanner', () => { // maxStaleness can only be used in single use transactions // so we can't use database.getSnapshot here - DATABASE.run(query, options, (err, rows) => { + 
database.run(query, options, (err, rows) => { assert.ifError(err); assert.strictEqual(rows!.length, records.length); done(); }); + }; + + it('GOOGLE_STANDARD_SQL should accept a max staleness', done => { + acceptMaxStaleness(done, DATABASE, googleSqlRecords); }); - it('should do a strong read with concurrent updates', done => { + it('POSTGRESQL should accept a max staleness', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + acceptMaxStaleness(done, PG_DATABASE, postgreSqlRecords); + }); + + const strongReadWithConcurrentUpdates = ( + done, + database, + table, + records + ) => { const options = { strong: true, }; - DATABASE.getSnapshot(options, (err, transaction) => { + database.getSnapshot(options, (err, transaction) => { assert.ifError(err); const query = 'SELECT * FROM TxnTable'; @@ -4170,14 +5502,40 @@ describe('Spanner', () => { ); }); }); + }; + + it('GOOGLE_STANDARD_SQL should do a strong read with concurrent updates', done => { + strongReadWithConcurrentUpdates( + done, + DATABASE, + googleSqlTable, + googleSqlRecords + ); }); - it('should do an exact read with concurrent updates', done => { + it('POSTGRESQL should do a strong read with concurrent updates', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + strongReadWithConcurrentUpdates( + done, + PG_DATABASE, + postgreSqlTable, + postgreSqlRecords + ); + }); + + const exactReadWithConcurrentUpdates = ( + done, + database, + table, + records + ) => { const options = { readTimestamp: records[records.length - 1].commitTimestamp, }; - DATABASE.getSnapshot(options, (err, transaction) => { + database.getSnapshot(options, (err, transaction) => { assert.ifError(err); const query = 'SELECT * FROM TxnTable'; @@ -4210,14 +5568,40 @@ describe('Spanner', () => { ); }); }); + }; + + it('GOOGLE_STANDARD_SQL should do an exact read with concurrent updates', done => { + exactReadWithConcurrentUpdates( + done, + DATABASE, + googleSqlTable, + googleSqlRecords + ); + }); + + it('POSTGRESQL should do an exact read with concurrent updates', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + exactReadWithConcurrentUpdates( + done, + PG_DATABASE, + postgreSqlTable, + postgreSqlRecords + ); }); - it('should read with staleness & concurrent updates', done => { + const readWithStalenessAndConcurrentUpdates = ( + done, + database, + table, + records + ) => { const options = { exactStaleness: Date.now() - records[0].localTimestamp, }; - DATABASE.getSnapshot(options, (err, transaction) => { + database.getSnapshot(options, (err, transaction) => { assert.ifError(err); const query = 'SELECT * FROM TxnTable'; @@ -4245,11 +5629,54 @@ describe('Spanner', () => { ); }); }); + }; + + it('GOOGLE_STANDARD_SQL should read with staleness & concurrent updates', done => { + readWithStalenessAndConcurrentUpdates( + done, + DATABASE, + googleSqlTable, + googleSqlRecords + ); + }); + + it('POSTGRESQL should read with staleness & concurrent updates', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + readWithStalenessAndConcurrentUpdates( + done, + PG_DATABASE, + postgreSqlTable, + postgreSqlRecords + ); }); }); describe('dml', () => { before(done => { + const postgresUpdateDmlDatabase = () => { + PG_DATABASE.runTransaction((err, transaction) => { + assert.ifError(err); + + transaction!.runUpdate( + { + sql: + 'INSERT INTO ' + + TABLE_NAME + + ' ("Key", "StringValue") VALUES($1, $2)', + params: { + p1: 'k999', + p2: 'abc', + }, + }, + err => { + assert.ifError(err); + transaction!.commit(done); + } + ); + 
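// Several DML statements can also be sent in a single round trip; a minimal
// sketch using transaction.batchUpdate (illustrative only, with a hypothetical
// extra row; this hook only issues the single INSERT above):
//
//   const [rowCounts] = await transaction!.batchUpdate([
//     {
//       sql:
//         'INSERT INTO ' +
//         TABLE_NAME +
//         ' ("Key", "StringValue") VALUES($1, $2)',
//       params: {p1: 'k998', p2: 'xyz'},
//     },
//   ]);
//   assert.deepStrictEqual(rowCounts, [1]);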
}); + }; DATABASE.runTransaction((err, transaction) => { assert.ifError(err); @@ -4266,210 +5693,331 @@ describe('Spanner', () => { }, err => { assert.ifError(err); - transaction!.commit(done); + if (!IS_EMULATOR_ENABLED) { + transaction!.commit(postgresUpdateDmlDatabase); + } else { + transaction!.commit(done); + } } ); }); }); - it('should return rowCount from runUpdate', done => { - DATABASE.runTransaction((err, transaction) => { + const rowCountRunUpdate = (done, database, query) => { + database.runTransaction((err, transaction) => { assert.ifError(err); - transaction!.runUpdate( - { - sql: - 'UPDATE ' + - TABLE_NAME + - ' t SET t.StringValue = @str WHERE t.Key = @key', - params: { - key: 'k999', - str: 'abcd', - }, - }, - (err, rowCount) => { - assert.ifError(err); - assert.strictEqual(rowCount, 1); - transaction!.rollback(done); - } - ); + transaction!.runUpdate(query, (err, rowCount) => { + assert.ifError(err); + assert.strictEqual(rowCount, 1); + transaction!.rollback(done); + }); }); + }; + it('GOOGLE_STANDARD_SQL should return rowCount from runUpdate', done => { + const query = { + sql: + 'UPDATE ' + + TABLE_NAME + + ' t SET t.StringValue = @str WHERE t.Key = @key', + params: { + key: 'k999', + str: 'abcd', + }, + }; + rowCountRunUpdate(done, DATABASE, query); }); - it('should return rowCount from run', done => { - DATABASE.runTransaction((err, transaction) => { + it('POSTGRESQL should return rowCount from runUpdate', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const query = { + sql: + 'UPDATE ' + TABLE_NAME + ' SET "StringValue" = $1 WHERE "Key" = $2', + params: { + p1: 'abcd', + p2: 'k999', + }, + }; + rowCountRunUpdate(done, PG_DATABASE, query); + }); + + const rowCountRun = (done, database, query) => { + database.runTransaction((err, transaction) => { assert.ifError(err); - transaction!.run( - { - sql: - 'UPDATE ' + - TABLE_NAME + - ' t SET t.StringValue = @str WHERE t.Key = @key', - params: { - key: 'k999', - str: 'abcd', - }, - }, - (err, row, stats) => { - assert.ifError(err); + transaction!.run(query, (err, row, stats) => { + assert.ifError(err); - const rowCount = Math.floor(stats[stats.rowCount!] as number); - assert.strictEqual(rowCount, 1); + const rowCount = Math.floor(stats[stats.rowCount!] as number); + assert.strictEqual(rowCount, 1); - transaction!.rollback(done); - } - ); + transaction!.rollback(done); + }); }); + }; + it('GOOGLE_STANDARD_SQL should return rowCount from run', done => { + const query = { + sql: + 'UPDATE ' + + TABLE_NAME + + ' t SET t.StringValue = @str WHERE t.Key = @key', + params: { + key: 'k999', + str: 'abcd', + }, + }; + rowCountRun(done, DATABASE, query); }); - it('should exec multiple dml statements on the same txn', done => { - const key = 'k1000'; - const str = 'abcd'; - const num = 11; + it('POSTGRESQL should return rowCount from run', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const query = { + sql: + 'UPDATE ' + TABLE_NAME + ' SET "StringValue" = $1 WHERE "Key" = $2', + params: { + p1: 'abcd', + p2: 'k999', + }, + }; + rowCountRun(done, PG_DATABASE, query); + }); - DATABASE.runTransaction((err, transaction) => { + const multipleDmlOnTxn = ( + done, + database, + insertQuery, + updateQuery, + selectQuery + ) => { + database.runTransaction((err, transaction) => { assert.ifError(err); transaction! 
- .runUpdate({ - sql: - 'INSERT INTO ' + - TABLE_NAME + - ' (Key, StringValue) VALUES (@key, @str)', - params: {key, str}, - }) + .runUpdate(insertQuery) .then(data => { const rowCount = data[0]; assert.strictEqual(rowCount, 1); - return transaction!.runUpdate({ - sql: - 'UPDATE ' + - TABLE_NAME + - ' t SET t.NumberValue = @num WHERE t.KEY = @key', - params: {key, num}, - }); + return transaction!.runUpdate(updateQuery); }) .then(data => { const rowCount = data[0]; assert.strictEqual(rowCount, 1); - return transaction!.run({ - sql: 'SELECT * FROM ' + TABLE_NAME + ' WHERE Key = @key', - params: {key}, - }); + return transaction!.run(selectQuery); }) .then(data => { const rows = data[0].map(row => row.toJSON()); assert.strictEqual(rows.length, 1); assert.deepStrictEqual(rows[0], { - Key: key, - StringValue: str, - NumberValue: num, + Key: 'k1000', + StringValue: 'abcd', + NumberValue: 11, }); return transaction!.rollback(); }) .then(() => done(), done); }); + }; + + it('GOOGLE_STANDARD_SQL should exec multiple dml statements on the same txn', done => { + const key = 'k1000'; + const str = 'abcd'; + const num = 11; + const insertQuery = { + sql: + 'INSERT INTO ' + + TABLE_NAME + + ' (Key, StringValue) VALUES (@key, @str)', + params: {key, str}, + }; + const updateQuery = { + sql: + 'UPDATE ' + + TABLE_NAME + + ' t SET t.NumberValue = @num WHERE t.KEY = @key', + params: {key, num}, + }; + const selectQuery = { + sql: 'SELECT * FROM ' + TABLE_NAME + ' WHERE Key = @key', + params: {key}, + }; + multipleDmlOnTxn(done, DATABASE, insertQuery, updateQuery, selectQuery); }); - it('should show dml changes in query results', done => { - const key = 'k999'; + it('POSTGRESQL should exec multiple dml statements on the same txn', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const key = 'k1000'; const str = 'abcd'; + const num = 11; + const insertQuery = { + sql: + 'INSERT INTO ' + + TABLE_NAME + + ' ("Key", "StringValue") VALUES ($1, $2)', + params: {p1: key, p2: str}, + }; + const updateQuery = { + sql: + 'UPDATE ' + TABLE_NAME + ' SET "NumberValue" = $1 WHERE "Key" = $2', + params: {p1: num, p2: key}, + }; + const selectQuery = { + sql: 'SELECT * FROM ' + TABLE_NAME + ' WHERE "Key" = $1', + params: {p1: key}, + }; + multipleDmlOnTxn( + done, + PG_DATABASE, + insertQuery, + updateQuery, + selectQuery + ); + }); - DATABASE.runTransaction((err, transaction) => { + const dmlChangesInQueryResults = ( + done, + database, + updateQuery, + selectQuery + ) => { + database.runTransaction((err, transaction) => { assert.ifError(err); transaction! 
- .runUpdate({ - sql: - 'UPDATE ' + - TABLE_NAME + - ' t SET t.StringValue = @str WHERE t.Key = @key', - params: {key, str}, - }) + .runUpdate(updateQuery) .then(() => { - return transaction!.run({ - sql: 'SELECT * FROM ' + TABLE_NAME + ' WHERE Key = @key', - params: {key}, - }); + return transaction!.run(selectQuery); }) .then(data => { const rows = data[0].map(row => row.toJSON()); assert.strictEqual(rows.length, 1); - assert.strictEqual(rows[0].StringValue, str); + assert.strictEqual(rows[0].StringValue, 'abcd'); }) .then(() => transaction!.rollback(done), done); }); + }; + + it('GOOGLE_STANDARD_SQL should show dml changes in query results', done => { + const key = 'k999'; + const str = 'abcd'; + const updateQuery = { + sql: + 'UPDATE ' + + TABLE_NAME + + ' t SET t.StringValue = @str WHERE t.Key = @key', + params: {key, str}, + }; + const selectQuery = { + sql: 'SELECT * FROM ' + TABLE_NAME + ' WHERE Key = @key', + params: {key}, + }; + dmlChangesInQueryResults(done, DATABASE, updateQuery, selectQuery); }); - it('should rollback a dml statement', done => { + it('POSTGRESQL should show dml changes in query results', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } const key = 'k999'; const str = 'abcd'; + const updateQuery = { + sql: + 'UPDATE ' + TABLE_NAME + ' SET "StringValue" = $1 WHERE "Key" = $2', + params: {p1: str, p2: key}, + }; + const selectQuery = { + sql: 'SELECT * FROM ' + TABLE_NAME + ' WHERE "Key" = $1', + params: {p1: key}, + }; + dmlChangesInQueryResults(done, PG_DATABASE, updateQuery, selectQuery); + }); - DATABASE.runTransaction((err, transaction) => { + const rollbackDmlStatement = ( + done, + database, + updateQuery, + selectQuery + ) => { + database.runTransaction((err, transaction) => { assert.ifError(err); transaction! 
- .runUpdate({ - sql: - 'UPDATE ' + - TABLE_NAME + - ' t SET t.StringValue = @str WHERE t.Key = @key', - params: {key, str}, - }) + .runUpdate(updateQuery) .then(() => transaction!.rollback()) .then(() => { - return DATABASE.run({ - sql: 'SELECT * FROM ' + TABLE_NAME + ' WHERE Key = @key', - params: {key}, - }); + return database.run(selectQuery); }) .then(data => { // eslint-disable-next-line @typescript-eslint/no-explicit-any const rows = data[0].map(row => (row as any).toJSON()); - assert.notStrictEqual(rows[0].StringValue, str); + assert.notStrictEqual(rows[0].StringValue, 'abcd'); done(); }) .catch(done); }); + }; + + it('GOOGLE_STANDARD_SQL should rollback a dml statement', done => { + const key = 'k999'; + const str = 'abcd'; + const updateQuery = { + sql: + 'UPDATE ' + + TABLE_NAME + + ' t SET t.StringValue = @str WHERE t.Key = @key', + params: {key, str}, + }; + const selectQuery = { + sql: 'SELECT * FROM ' + TABLE_NAME + ' WHERE Key = @key', + params: {key}, + }; + rollbackDmlStatement(done, DATABASE, updateQuery, selectQuery); + }); + + it('POSTGRESQL should rollback a dml statement', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const key = 'k999'; + const str = 'abcd'; + const updateQuery = { + sql: + 'UPDATE ' + TABLE_NAME + ' SET "StringValue" = $1 WHERE "Key" = $2', + params: {p1: str, p2: key}, + }; + const selectQuery = { + sql: 'SELECT * FROM ' + TABLE_NAME + ' WHERE "Key" = $1', + params: {p1: key}, + }; + rollbackDmlStatement(done, PG_DATABASE, updateQuery, selectQuery); }); - it('should handle using both dml and insert methods', done => { - const str = 'dml+mutation'; - - DATABASE.runTransaction((err, transaction) => { + const handleDmlAndInsert = (done, database, insertQuery, selectQuery) => { + database.runTransaction((err, transaction) => { assert.ifError(err); transaction! 
- .runUpdate({ - sql: - 'INSERT INTO ' + - TABLE_NAME + - ' (Key, StringValue) VALUES (@key, @str)', - params: { - key: 'k1001', - str, - }, - }) + .runUpdate(insertQuery) .then(() => { transaction!.insert('TxnTable', { Key: 'k1002', - StringValue: str, + StringValue: 'dml+mutation', }); return transaction!.commit(); }) .then(() => { - return DATABASE.run({ - sql: - 'SELECT * FROM ' + TABLE_NAME + ' WHERE StringValue = @str', - params: {str}, - }); + return database.run(selectQuery); }) .then(data => { const rows = data[0]; @@ -4479,68 +6027,152 @@ describe('Spanner', () => { }) .catch(done); }); + }; + + it('GOOGLE_STANDARD_SQL should handle using both dml and insert methods', done => { + const str = 'dml+mutation'; + const insertQuery = { + sql: + 'INSERT INTO ' + + TABLE_NAME + + ' (Key, StringValue) VALUES (@key, @str)', + params: { + key: 'k1001', + str, + }, + }; + const selectQuery = { + sql: 'SELECT * FROM ' + TABLE_NAME + ' WHERE StringValue = @str', + params: {str}, + }; + handleDmlAndInsert(done, DATABASE, insertQuery, selectQuery); + }); + + it('POSTGRESQL should handle using both dml and insert methods', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const str = 'dml+mutation'; + const insertQuery = { + sql: + 'INSERT INTO ' + + TABLE_NAME + + ' ("Key", "StringValue") VALUES ($1, $2)', + params: { + p1: 'k1001', + p2: str, + }, + }; + const selectQuery = { + sql: 'SELECT * FROM ' + TABLE_NAME + ' WHERE "StringValue" = $1', + params: {p1: str}, + }; + handleDmlAndInsert(done, PG_DATABASE, insertQuery, selectQuery); }); }); describe('pdml', () => { - it('should execute a simple pdml statement', done => { - DATABASE.runPartitionedUpdate( - { - sql: - 'UPDATE ' + - TABLE_NAME + - ' t SET t.StringValue = @str WHERE t.Key = @key', - params: { - key: 'k1', - str: 'abcde', - }, + const simplePdml = (done, database, query) => { + database.runPartitionedUpdate(query, (err, rowCount) => { + assert.ifError(err); + assert.strictEqual(rowCount, 1); + done(); + }); + }; + + it('GOOGLE_STANDARD_SQL should execute a simple pdml statement', done => { + const query = { + sql: + 'UPDATE ' + + TABLE_NAME + + ' t SET t.StringValue = @str WHERE t.Key = @key', + params: { + key: 'k1', + str: 'abcde', }, - (err, rowCount) => { - assert.ifError(err); - assert.strictEqual(rowCount, 1); - done(); - } - ); + }; + simplePdml(done, DATABASE, query); }); - it('should execute a long running pdml statement', async function () { - if (!IS_EMULATOR_ENABLED) { + it('POSTGRESQL should execute a simple pdml statement', function (done) { + if (IS_EMULATOR_ENABLED) { this.skip(); } + const query = { + sql: + 'UPDATE ' + TABLE_NAME + ' SET "StringValue" = $1 WHERE "Key" = $2', + params: { + p1: 'abcde', + p2: 'k1', + }, + }; + simplePdml(done, PG_DATABASE, query); + }); + const longRunningPdml = async function ( + database, + updateQuery, + selectQuery + ) { const count = 10000; const tableData = new Array(count).fill(0).map((_, i) => { return {Key: `longpdml${i}`, StringValue: 'a'}; }); - const str = new Array(1000).fill('b').join('\n'); - - return DATABASE.runTransactionAsync(transaction => { - transaction.insert('TxnTable', tableData); - return transaction.commit(); - }) + return database + .runTransactionAsync(transaction => { + transaction.insert('TxnTable', tableData); + return transaction.commit(); + }) .then(() => { - return DATABASE.runPartitionedUpdate({ - sql: - 'UPDATE ' + - TABLE_NAME + - " t SET t.StringValue = @str WHERE t.StringValue = 'a'", - params: {str}, - }); + return 
database.runPartitionedUpdate(updateQuery); }) .then(([rowCount]) => { assert.strictEqual(rowCount, count); - return DATABASE.run({ - sql: - 'SELECT Key FROM ' + TABLE_NAME + ' WHERE StringValue = @str', - params: {str}, + return database.run(selectQuery).then(([rows]) => { + assert.strictEqual(rows.length, count); }); - }) - .then(([rows]) => { - assert.strictEqual(rows.length, count); }); + }; + + it('GOOGLE_STANDARD_SQL should execute a long running pdml statement', async function () { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const str = new Array(1000).fill('b').join('\n'); + const updateQuery = { + sql: + 'UPDATE ' + + TABLE_NAME + + " t SET t.StringValue = @str WHERE t.StringValue = 'a'", + params: {str}, + }; + const selectQuery = { + sql: 'SELECT Key FROM ' + TABLE_NAME + ' WHERE StringValue = @str', + params: {str}, + }; + await longRunningPdml(DATABASE, updateQuery, selectQuery); + }); + + it('POSTGRESQL should execute a long running pdml statement', async function () { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const str = new Array(1000).fill('b').join('\n'); + const updateQuery = { + sql: + 'UPDATE ' + + TABLE_NAME + + ' SET "StringValue" = $1 WHERE "StringValue" = \'a\'', + params: {p1: str}, + }; + const selectQuery = { + sql: 'SELECT "Key" FROM ' + TABLE_NAME + ' WHERE "StringValue" = $1', + params: {p1: str}, + }; + await longRunningPdml(PG_DATABASE, updateQuery, selectQuery); }); }); @@ -4549,7 +6181,7 @@ describe('Spanner', () => { const str = 'abcd'; const num = 11; - const insert = { + const googleSqlInsert = { sql: 'INSERT INTO ' + TABLE_NAME + @@ -4557,7 +6189,15 @@ describe('Spanner', () => { params: {key, str}, }; - const update = { + const postgreSqlInsert = { + sql: + 'INSERT INTO ' + + TABLE_NAME + + ' ("Key", "StringValue") VALUES ($1, $2)', + params: {p1: key, p2: str}, + }; + + const googleSqlUpdate = { sql: 'UPDATE ' + TABLE_NAME + @@ -4565,26 +6205,48 @@ describe('Spanner', () => { params: {key, num}, }; + const posgreSqlUpdate = { + sql: + 'UPDATE ' + TABLE_NAME + ' SET "NumberValue" = $1 WHERE "Key" = $2', + params: {p1: num, p2: key}, + }; + // this should fail since we're not binding params - const borked = { + const googleSqlBorked = { sql: 'UPDATE ' + TABLE_NAME + ' t SET t.NumberValue = @num WHERE t.KEY = @key', }; - it('should execute a single statement', async () => { - const rowCounts = await DATABASE.runTransactionAsync(async txn => { + const postgreSqlBorked = { + sql: + 'UPDATE ' + TABLE_NAME + ' SET "NumberValue" = $1 WHERE "Key" = $2', + }; + + const executeSingleStatement = async (database, insert) => { + const rowCounts = await database.runTransactionAsync(async txn => { const [rowCounts] = await txn.batchUpdate([insert]); await txn.rollback(); return rowCounts; }); assert.deepStrictEqual(rowCounts, [1]); + }; + + it('GOOGLE_STANDARD_SQL should execute a single statement', async () => { + await executeSingleStatement(DATABASE, googleSqlInsert); }); - it('should return an error when no statements are supplied', async () => { - const err = await DATABASE.runTransactionAsync(async txn => { + it('POSTGRESQL should execute a single statement', async function () { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + await executeSingleStatement(PG_DATABASE, postgreSqlInsert); + }); + + const noStatementError = async database => { + const err = await database.runTransactionAsync(async txn => { let err; try { @@ -4602,20 +6264,50 @@ describe('Spanner', () => { 'batchUpdate requires at least 1 DML statement.' 
); assert.strictEqual(err.code, 3); + }; + + it('GOOGLE_STANDARD_SQL should return an error when no statements are supplied', async () => { + await noStatementError(DATABASE); + }); + + it('POSTGRESQL should return an error when no statements are supplied', async function () { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + await noStatementError(PG_DATABASE); }); - it('should run multiple statements that depend on each other', async () => { - const rowCounts = await DATABASE.runTransactionAsync(async txn => { + const multipleDependingStatements = async (database, insert, update) => { + const rowCounts = await database.runTransactionAsync(async txn => { const [rowCounts] = await txn.batchUpdate([insert, update]); await txn.rollback(); return rowCounts; }); assert.deepStrictEqual(rowCounts, [1, 1]); + }; + + it('GOOGLE_STANDARD_SQL should run multiple statements that depend on each other', async () => { + await multipleDependingStatements( + DATABASE, + googleSqlInsert, + googleSqlUpdate + ); + }); + + it('POSTGRESQL should run multiple statements that depend on each other', async function () { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + await multipleDependingStatements( + PG_DATABASE, + postgreSqlInsert, + posgreSqlUpdate + ); }); - it('should run after a runUpdate call', async () => { - const rowCounts = await DATABASE.runTransactionAsync(async txn => { + const runAfterRunUpdate = async (database, insert, update) => { + const rowCounts = await database.runTransactionAsync(async txn => { await txn.runUpdate(insert); const [rowCounts] = await txn.batchUpdate([update]); await txn.rollback(); @@ -4623,10 +6315,21 @@ describe('Spanner', () => { }); assert.deepStrictEqual(rowCounts, [1]); + }; + + it('GOOGLE_STANDARD_SQL should run after a runUpdate call', async () => { + await runAfterRunUpdate(DATABASE, googleSqlInsert, googleSqlUpdate); + }); + + it('POSTGRESQL should run after a runUpdate call', async function () { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + await runAfterRunUpdate(PG_DATABASE, postgreSqlInsert, posgreSqlUpdate); }); - it('should run before a runUpdate call', async () => { - const rowCounts = await DATABASE.runTransactionAsync(async txn => { + const runBeforeRunUpdate = async (database, insert, update) => { + const rowCounts = await database.runTransactionAsync(async txn => { const [rowCounts] = await txn.batchUpdate([insert]); await txn.runUpdate(update); await txn.rollback(); @@ -4634,10 +6337,30 @@ describe('Spanner', () => { }); assert.deepStrictEqual(rowCounts, [1]); + }; + + it('GOOGLE_STANDARD_SQL should run before a runUpdate call', async () => { + await runBeforeRunUpdate(DATABASE, googleSqlInsert, googleSqlUpdate); + }); + + it('POSTGRESQL should run before a runUpdate call', async function () { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + await runBeforeRunUpdate( + PG_DATABASE, + postgreSqlInsert, + posgreSqlUpdate + ); }); - it('should stop executing statements if an error occurs', async () => { - const err = await DATABASE.runTransactionAsync(async txn => { + const stopExecutingStatementsIfError = async ( + database, + insert, + borked, + update + ) => { + const err = await database.runTransactionAsync(async txn => { let err; try { @@ -4656,10 +6379,35 @@ describe('Spanner', () => { assert.strictEqual(err.code, grpc.status.INVALID_ARGUMENT); assert.deepStrictEqual(err.rowCounts, [1]); + }; + + it('GOOGLE_STANDARD_SQL should stop executing statements if an error occurs', async () => { + await stopExecutingStatementsIfError( + DATABASE, + 
googleSqlInsert, + googleSqlBorked, + googleSqlUpdate + ); + }); + + it('POSTGRESQL should stop executing statements if an error occurs', async function () { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + await stopExecutingStatementsIfError( + PG_DATABASE, + postgreSqlInsert, + postgreSqlBorked, + posgreSqlUpdate + ); }); - it('should ignore any additional statement errors', async () => { - const err = await DATABASE.runTransactionAsync(async txn => { + const ignoreAdditionalStatementErrors = async ( + database, + insert, + borked + ) => { + const err = await database.runTransactionAsync(async txn => { let err; try { @@ -4674,12 +6422,31 @@ describe('Spanner', () => { assert.strictEqual(err.code, 3); assert.deepStrictEqual(err.rowCounts, [1]); + }; + + it('GOOGLE_STANDARD_SQL should ignore any additional statement errors', async () => { + await ignoreAdditionalStatementErrors( + DATABASE, + googleSqlInsert, + googleSqlBorked + ); + }); + + it('POSTGRESQL should ignore any additional statement errors', async function () { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + await ignoreAdditionalStatementErrors( + PG_DATABASE, + postgreSqlInsert, + postgreSqlBorked + ); }); }); describe('read/write', () => { - it('should throw an error for mismatched columns', done => { - DATABASE.runTransaction((err, transaction) => { + const mismatchedColumnError = (done, database, table) => { + database.runTransaction((err, transaction) => { assert.ifError(err); const rows = [ @@ -4708,10 +6475,21 @@ describe('Spanner', () => { transaction!.end(); done(); }); + }; + + it('GOOGLE_STANDARD_SQL should throw an error for mismatched columns', done => { + mismatchedColumnError(done, DATABASE, googleSqlTable); }); - it('should commit a transaction', done => { - DATABASE.runTransaction((err, transaction) => { + it('POSTGRESQL should throw an error for mismatched columns', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + mismatchedColumnError(done, PG_DATABASE, postgreSqlTable); + }); + + const commitTransaction = (done, database, table) => { + database.runTransaction((err, transaction) => { assert.ifError(err); transaction!.insert(table.name, { @@ -4721,10 +6499,21 @@ describe('Spanner', () => { transaction!.commit(done); }); + }; + + it('GOOGLE_STANDARD_SQL should commit a transaction', done => { + commitTransaction(done, DATABASE, googleSqlTable); }); - it('should rollback a transaction', done => { - DATABASE.runTransaction((err, transaction) => { + it('POSTGRESQL should commit a transaction', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + commitTransaction(done, PG_DATABASE, postgreSqlTable); + }); + + const rollbackTransaction = (done, database) => { + database.runTransaction((err, transaction) => { assert.ifError(err); transaction!.run('SELECT * FROM TxnTable', err => { @@ -4732,6 +6521,17 @@ describe('Spanner', () => { transaction!.rollback(done); }); }); + }; + + it('GOOGLE_STANDARD_SQL should rollback a transaction', done => { + rollbackTransaction(done, DATABASE); + }); + + it('POSTGRESQL should rollback a transaction', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + rollbackTransaction(done, PG_DATABASE); }); describe('concurrent transactions', () => { @@ -4741,14 +6541,13 @@ describe('Spanner', () => { }; beforeEach(() => { - return table.update(defaultRowValues); + return googleSqlTable.update(defaultRowValues).then(() => { + postgreSqlTable.update(defaultRowValues); + }); }); - it('should handle concurrent transactions with read', function 
(done) { - if (IS_EMULATOR_ENABLED) { - this.skip(); - } - DATABASE.runTransaction((err, transaction) => { + const readConcurrentTransaction = (done, database, table) => { + database.runTransaction((err, transaction) => { assert.ifError(err); incrementValue(err => { @@ -4763,7 +6562,7 @@ describe('Spanner', () => { }); function incrementValue(callback) { - DATABASE.runTransaction((err, transaction) => { + database.runTransaction((err, transaction) => { assert.ifError(err); getValue(transaction, (err, value) => { @@ -4800,13 +6599,24 @@ describe('Spanner', () => { } ); } + }; + + it('GOOGLE_STANDARD_SQL should handle concurrent transactions with read', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + readConcurrentTransaction(done, DATABASE, googleSqlTable); }); - it('should handle concurrent transactions with query', function (done) { + it('POSTGRESQL should handle concurrent transactions with read', function (done) { if (IS_EMULATOR_ENABLED) { this.skip(); } - DATABASE.runTransaction((err, transaction) => { + readConcurrentTransaction(done, PG_DATABASE, postgreSqlTable); + }); + + const queryConcurrentTransaction = (done, database, table, query) => { + database.runTransaction((err, transaction) => { assert.ifError(err); incrementValue(err => { @@ -4821,7 +6631,7 @@ describe('Spanner', () => { }); function incrementValue(callback) { - DATABASE.runTransaction((err, transaction) => { + database.runTransaction((err, transaction) => { assert.ifError(err); getValue(transaction, (err, value) => { @@ -4841,43 +6651,60 @@ describe('Spanner', () => { } function getValue(txn, callback) { - txn.run( - { - sql: 'SELECT * FROM ' + table.name + ' WHERE Key = @key', - params: { - key: defaultRowValues.Key, - }, - }, - (err, rows) => { - if (err) { - callback(err); - return; - } - - const row = rows[0].toJSON(); - callback(null, row.NumberValue); + txn.run(query, (err, rows) => { + if (err) { + callback(err); + return; } - ); + + const row = rows[0].toJSON(); + callback(null, row.NumberValue); + }); + } + }; + + it('GOOGLE_STANDARD_SQL should handle concurrent transactions with query', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const query = { + sql: 'SELECT * FROM ' + googleSqlTable.name + ' WHERE Key = @key', + params: { + key: defaultRowValues.Key, + }, + }; + queryConcurrentTransaction(done, DATABASE, googleSqlTable, query); + }); + + it('POSTGRESQL should handle concurrent transactions with query', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); } + const query = { + sql: 'SELECT * FROM ' + postgreSqlTable.name + ' WHERE "Key" = $1', + params: { + p1: defaultRowValues.Key, + }, + }; + queryConcurrentTransaction(done, PG_DATABASE, postgreSqlTable, query); }); }); - it('should retry an aborted txn when reading fails', function (done) { - if (IS_EMULATOR_ENABLED) { - this.skip(); - } + const retryAbortedTxnWhenReadingFails = ( + done, + database, + table, + query + ) => { const key = 'k888'; - const query = `SELECT * FROM ${table.name} WHERE Key = '${key}'`; - let attempts = 0; - const expectedRow = { Key: key, NumberValue: null, StringValue: 'abc', }; - DATABASE.runTransaction((err, transaction) => { + database.runTransaction((err, transaction) => { assert.ifError(err); transaction!.run(query, err => { @@ -4915,7 +6742,7 @@ describe('Spanner', () => { }); function runOtherTransaction(callback) { - DATABASE.runTransaction((err, transaction) => { + database.runTransaction((err, transaction) => { if (err) { callback(err); return; @@ -4936,14 
+6763,38 @@ describe('Spanner', () => { function wrap(callback) { setImmediate(callback); } + }; + + it('GOOGLE_STANDARD_SQL should retry an aborted txn when reading fails', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const key = 'k888'; + const query = `SELECT * FROM ${googleSqlTable.name} WHERE Key = '${key}'`; + retryAbortedTxnWhenReadingFails(done, DATABASE, googleSqlTable, query); }); - it('should retry an aborted txn when commit fails', function (done) { + it('POSTGRESQL should retry an aborted txn when reading fails', function (done) { if (IS_EMULATOR_ENABLED) { this.skip(); } + const key = 'k888'; + const query = { + sql: `SELECT * FROM ${postgreSqlTable.name} WHERE "Key" = $1`, + params: { + p1: key, + }, + }; + retryAbortedTxnWhenReadingFails( + done, + PG_DATABASE, + postgreSqlTable, + query + ); + }); + + const retryAbortedTxnWhenCommitFails = (done, database, table, query) => { const key = 'k9999'; - const query = `SELECT * FROM ${table.name} WHERE Key = '${key}'`; let attempts = 0; const expectedRow = { @@ -4952,7 +6803,7 @@ describe('Spanner', () => { StringValue: 'abc', }; - DATABASE.runTransaction((err, transaction) => { + database.runTransaction((err, transaction) => { assert.ifError(err); transaction!.run(query, (err, rows) => { @@ -4988,7 +6839,7 @@ describe('Spanner', () => { }); function runOtherTransaction(callback) { - DATABASE.runTransaction((err, transaction) => { + database.runTransaction((err, transaction) => { if (err) { callback(err); return; @@ -5005,13 +6856,37 @@ describe('Spanner', () => { }); }); } + }; + + it('GOOGLE_STANDARD_SQL should retry an aborted txn when commit fails', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const key = 'k9999'; + const query = `SELECT * FROM ${googleSqlTable.name} WHERE Key = '${key}'`; + retryAbortedTxnWhenCommitFails(done, DATABASE, googleSqlTable, query); }); - it('should return a deadline error instead of aborted', function (done) { + it('POSTGRESQL should retry an aborted txn when commit fails', function (done) { if (IS_EMULATOR_ENABLED) { this.skip(); } + const key = 'k9999'; + const query = { + sql: `SELECT * FROM ${postgreSqlTable.name} WHERE "Key" = $1`, + params: { + p1: key, + }, + }; + retryAbortedTxnWhenCommitFails( + done, + PG_DATABASE, + postgreSqlTable, + query + ); + }); + const deadlineErrorInsteadOfAbort = (done, database, table) => { const options = { timeout: 10, }; @@ -5019,7 +6894,7 @@ describe('Spanner', () => { const query = `SELECT * FROM ${table.name}`; let attempts = 0; - DATABASE.runTransaction(options, (err, transaction) => { + database.runTransaction(options, (err, transaction) => { if (attempts++ === 1) { assert.strictEqual(err!.code, 4); assert( @@ -5050,7 +6925,7 @@ describe('Spanner', () => { }); function runOtherTransaction(callback) { - DATABASE.runTransaction((err, transaction) => { + database.runTransaction((err, transaction) => { if (err) { callback(err); return; @@ -5070,6 +6945,20 @@ describe('Spanner', () => { }); }); } + }; + + it('GOOGLE_STANDARD_SQL should return a deadline error instead of aborted', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + deadlineErrorInsteadOfAbort(done, DATABASE, googleSqlTable); + }); + + it('POSTGRESQL should return a deadline error instead of aborted', function (done) { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + deadlineErrorInsteadOfAbort(done, PG_DATABASE, postgreSqlTable); }); }); });
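Editorial sketch of the pattern this section repeats: each shared helper is registered twice, once per dialect, and every POSTGRESQL variant opens with the same emulator skip. The wrapper below is hypothetical (itForEachDialect is not part of this change; the name and wiring are illustrative) and only shows how that duplication could be centralized against the fixtures already defined in this file.

// Hypothetical helper (not part of this change), sketched against the
// fixtures defined above: DATABASE, PG_DATABASE, googleSqlTable,
// postgreSqlTable and IS_EMULATOR_ENABLED. It registers one Mocha test
// per dialect and keeps the PostgreSQL emulator skip in a single place.
const itForEachDialect = (title, testFn) => {
  it(`GOOGLE_STANDARD_SQL ${title}`, done => {
    testFn(done, DATABASE, googleSqlTable);
  });

  it(`POSTGRESQL ${title}`, function (done) {
    // Mirrors the skips above: the PostgreSQL variants do not run
    // against the emulator.
    if (IS_EMULATOR_ENABLED) {
      this.skip();
    }
    testFn(done, PG_DATABASE, postgreSqlTable);
  });
};

// Usage sketch, reusing a helper defined earlier in this section:
itForEachDialect('should commit a transaction', commitTransaction);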
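One more hedged sketch, prompted by the concurrent-transactions beforeEach above: it returns the googleSqlTable reset but starts the postgreSqlTable reset without returning it, so Mocha does not wait for the PostgreSQL row to be restored. A variant that waits for both, and skips the PostgreSQL reset on the emulator (where the PostgreSQL database is never created), assuming the same table handles and defaultRowValues:

beforeEach(() => {
  // Returning the chained promise makes Mocha wait for both dialect
  // tables to be reset before each concurrent-transaction test runs.
  return googleSqlTable.update(defaultRowValues).then(() => {
    return IS_EMULATOR_ENABLED
      ? undefined
      : postgreSqlTable.update(defaultRowValues);
  });
});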