diff --git a/packages/extension-driver-canner/.eslintrc.json b/packages/extension-driver-canner/.eslintrc.json new file mode 100644 index 000000000..9d9c0db55 --- /dev/null +++ b/packages/extension-driver-canner/.eslintrc.json @@ -0,0 +1,18 @@ +{ + "extends": ["../../.eslintrc.json"], + "ignorePatterns": ["!**/*"], + "overrides": [ + { + "files": ["*.ts", "*.tsx", "*.js", "*.jsx"], + "rules": {} + }, + { + "files": ["*.ts", "*.tsx"], + "rules": {} + }, + { + "files": ["*.js", "*.jsx"], + "rules": {} + } + ] +} diff --git a/packages/extension-driver-canner/README.md b/packages/extension-driver-canner/README.md new file mode 100644 index 000000000..c5bdd4dcd --- /dev/null +++ b/packages/extension-driver-canner/README.md @@ -0,0 +1,56 @@ +# extension-driver-canner + +Connect to [canner enterprise](https://docs.cannerdata.com/product/api_sdk/pg/pg_overview) through PostgreSQL Wire Protocol + +## Install + +1. Install package + + ```bash + npm i @vulcan-sql/extension-driver-canner + ``` + +2. Update `vulcan.yaml`, enable the extension. + + ```yaml + extensions: + canner: '@vulcan-sql/extension-driver-canner' + ``` + +3. Create a new profile in `profiles.yaml` or in your profiles' paths. + + ```yaml + - name: canner # profile name + type: canner + connection: + + + # Optional: Server host. + host: string + # Optional: The user to connect to canner enterprise. Default canner + user: string + # Optional: Password to connect to canner enterprise. should be the user PAT in canner enterprise + password: string + # Optional: sql name of the workspace. + database: string + # Optional: canner enterprise PostgreSQL wire protocol port + port: 7432 + # Optional: The max rows we should fetch once. + chunkSize: 100 + # Optional: Maximum number of clients the pool should contain.
+ max: 10 + # Optional: Number of milliseconds before a statement in query will time out, default is no timeout + statement_timeout: 0 + # Optional: Passed directly to node.TLSSocket, supports all tls.connect options + ssl: false + # Optional: Number of milliseconds before a query call will timeout, default is no timeout + query_timeout: 0 + # Optional: The name of the application that created this Client instance + application_name: string + # Optional: Number of milliseconds to wait for connection, default is no timeout + connectionTimeoutMillis: 0 + # Optional: Number of milliseconds before terminating any session with an open idle transaction, default is no timeout + idle_in_transaction_session_timeout: 0 + # Optional: Number of milliseconds a client must sit idle in the pool and not be checked out before it is disconnected from the backend and discarded. + idleTimeoutMillis: 10000 + ``` diff --git a/packages/extension-driver-canner/jest.config.ts b/packages/extension-driver-canner/jest.config.ts new file mode 100644 index 000000000..a14cb0317 --- /dev/null +++ b/packages/extension-driver-canner/jest.config.ts @@ -0,0 +1,14 @@ +module.exports = { + displayName: 'extension-driver-canner', + preset: '../../jest.preset.ts', + globals: { + 'ts-jest': { + tsconfig: '<rootDir>/tsconfig.spec.json', + }, + }, + transform: { + '^.+\\.[tj]s$': 'ts-jest', + }, + moduleFileExtensions: ['ts', 'js', 'html', 'node'], + coverageDirectory: '../../coverage/packages/extension-driver-canner', +}; diff --git a/packages/extension-driver-canner/package.json b/packages/extension-driver-canner/package.json new file mode 100644 index 000000000..a30c34f57 --- /dev/null +++ b/packages/extension-driver-canner/package.json @@ -0,0 +1,29 @@ +{ + "name": "@vulcan-sql/extension-driver-canner", + "description": "Canner Enterprise driver for Vulcan SQL", + "version": "0.4.0", + "type": "commonjs", + "publishConfig": { + "access": "public" + }, + "keywords": [ + "vulcan", + "vulcan-sql", + "data", +
"sql", + "database", + "data-warehouse", + "data-lake", + "api-builder", + "postgres", + "pg" + ], + "repository": { + "type": "git", + "url": "https://github.com/Canner/vulcan.git" + }, + "license": "MIT", + "peerDependencies": { + "@vulcan-sql/core": "~0.4.0-0" + } +} diff --git a/packages/extension-driver-canner/project.json b/packages/extension-driver-canner/project.json new file mode 100644 index 000000000..13cd2b238 --- /dev/null +++ b/packages/extension-driver-canner/project.json @@ -0,0 +1,64 @@ +{ + "root": "packages/extension-driver-canner", + "sourceRoot": "packages/extension-driver-canner/src", + "targets": { + "build": { + "executor": "@nrwl/workspace:run-commands", + "options": { + "command": "yarn ts-node ./tools/scripts/replaceAlias.ts extension-driver-canner" + }, + "dependsOn": [ + { + "projects": "self", + "target": "tsc" + } + ] + }, + "tsc": { + "executor": "@nrwl/js:tsc", + "outputs": ["{options.outputPath}"], + "options": { + "outputPath": "dist/packages/extension-driver-canner", + "main": "packages/extension-driver-canner/src/index.ts", + "tsConfig": "packages/extension-driver-canner/tsconfig.lib.json", + "assets": ["packages/extension-driver-canner/*.md"], + "buildableProjectDepsInPackageJsonType": "dependencies" + }, + "dependsOn": [ + { + "projects": "dependencies", + "target": "build" + } + ] + }, + "lint": { + "executor": "@nrwl/linter:eslint", + "outputs": ["{options.outputFile}"], + "options": { + "lintFilePatterns": ["packages/extension-driver-canner/**/*.ts"] + } + }, + "test": { + "executor": "@nrwl/jest:jest", + "outputs": ["coverage/packages/extension-driver-canner"], + "options": { + "jestConfig": "packages/extension-driver-canner/jest.config.ts", + "passWithNoTests": true + } + }, + "publish": { + "executor": "@nrwl/workspace:run-commands", + "options": { + "command": "node ../../../tools/scripts/publish.mjs {args.tag} {args.version}", + "cwd": "dist/packages/extension-driver-canner" + }, + "dependsOn": [ + { + "projects": 
"self", + "target": "build" + } + ] + } + }, + "tags": [] +} diff --git a/packages/extension-driver-canner/src/index.ts b/packages/extension-driver-canner/src/index.ts new file mode 100644 index 000000000..649cead27 --- /dev/null +++ b/packages/extension-driver-canner/src/index.ts @@ -0,0 +1,3 @@ +export * from './lib/cannerDataSource'; +import { CannerDataSource } from './lib/cannerDataSource'; +export default [CannerDataSource]; diff --git a/packages/extension-driver-canner/src/lib/CannerAdapter.ts b/packages/extension-driver-canner/src/lib/CannerAdapter.ts new file mode 100644 index 000000000..31a51ebea --- /dev/null +++ b/packages/extension-driver-canner/src/lib/CannerAdapter.ts @@ -0,0 +1,100 @@ +import axios from 'axios'; +import { PGOptions } from './cannerDataSource'; + +export class CannerAdapter { + public readonly host: string; + public readonly workspaceSqlName: string; + public readonly PAT: string | (() => string | Promise<string>); + private baseUrl: string | undefined; + + constructor(options?: PGOptions) { + if (!options) { + throw new Error(`connection options is required`); + } + const { host, database, password } = options; + if (!host || !database || !password) { + throw new Error(`host, database and password are required`); + } + this.host = host; + this.workspaceSqlName = database; + this.PAT = password; + } + + private async prepare() { + if (this.baseUrl) { + return; + } + const response = await axios({ + method: 'get', + maxBodyLength: Infinity, + url: `https://${this.host}/cluster-info`, + headers: {}, + }); + const { restfulApiBaseEndpoint } = response.data; + if (!restfulApiBaseEndpoint) { + throw new Error(`restfulApiBaseEndpoint is not found`); + } + + this.baseUrl = restfulApiBaseEndpoint; + } + + private async workspaceRequest( + method: string, + urlPath: string, + options?: Record<string, any> + ) { + await this.prepare(); + const response = await axios({ + headers: { + Authorization: `Token ${this.PAT}`, + }, + params: { + workspaceSqlName:
this.workspaceSqlName, + }, + url: `${this.baseUrl}${urlPath}`, + method, + ...options, + }); + return response.data; + } + + private async waitAsyncQueryToFinish(requestId: string) { + let response = await this.workspaceRequest( + 'get', + `/v2/async-queries/${requestId}` + ); + + let status = response.status; + // FINISHED & FAILED are the end state of a async request, and the result urls will be generated only after the request is finished. + while (!['FINISHED', 'FAILED'].includes(status)) { + await new Promise((resolve) => setTimeout(resolve, 1000)); + response = await this.workspaceRequest( + 'get', + `/v2/async-queries/${requestId}` + ); + status = response.status; + } + } + + private async getAsyncQueryResultUrls(requestId: string): Promise<string[]> { + const data = await this.workspaceRequest( + 'get', + `/v2/async-queries/${requestId}/result/urls` + ); + return data.urls || []; + } + + public async createAsyncQueryResultUrls(sql: string): Promise<string[]> { + const data = await this.workspaceRequest('post', '/v2/async-queries', { + data: { + sql, + timeout: 600, + noLimit: true, + }, + }); + const { id: requestId } = data; + await this.waitAsyncQueryToFinish(requestId); + const urls = await this.getAsyncQueryResultUrls(requestId); + return urls; + } +} diff --git a/packages/extension-driver-canner/src/lib/cannerDataSource.ts b/packages/extension-driver-canner/src/lib/cannerDataSource.ts new file mode 100644 index 000000000..e17532f31 --- /dev/null +++ b/packages/extension-driver-canner/src/lib/cannerDataSource.ts @@ -0,0 +1,203 @@ +import axios from 'axios'; +import { + DataResult, + DataSource, + ExecuteOptions, + ExportOptions, + InternalError, + RequestParameter, + VulcanExtensionId, +} from '@vulcan-sql/core'; +import { Pool, PoolConfig, QueryResult } from 'pg'; +import * as Cursor from 'pg-cursor'; +import { Readable } from 'stream'; +import { buildSQL } from './sqlBuilder'; +import { mapFromPGTypeId } from './typeMapper'; +import * as fs from 'fs'; +import * as path
from 'path'; +import { CannerAdapter } from './CannerAdapter'; + +export interface PGOptions extends PoolConfig { + chunkSize?: number; +} + +@VulcanExtensionId('canner') +export class CannerDataSource extends DataSource<any, PGOptions> { + private logger = this.getLogger(); + private poolMapping = new Map<string, { pool: Pool; options?: PGOptions }>(); + + public override async onActivate() { + const profiles = this.getProfiles().values(); + for (const profile of profiles) { + // try to connect by pg wire protocol and make request to api server + this.logger.debug( + `Initializing profile: ${profile.name} using pg driver` + ); + //================================================================================================= + // PG wire protocol + const pool = new Pool(profile.connection); + // https://node-postgres.com/api/pool#poolconnect + // When a client is sitting idly in the pool it can still emit errors because it is connected to a live backend. + // If the backend goes down or a network partition is encountered all the idle, connected clients in your application will emit an error through the pool's error event emitter.
+ pool.on('error', (err) => { + this.logger.warn( + `Pool client of profile instance ${profile.name} connecting failed, detail error, ${err}` + ); + }); + await pool.query('select 1'); + this.poolMapping.set(profile.name, { + pool, + options: profile.connection, + }); + this.logger.debug(`Profile ${profile.name} initialized`); + } + } + + public override async export({ + sql, + directory, + profileName, + }: ExportOptions): Promise<void> { + if (!this.poolMapping.has(profileName)) { + throw new InternalError(`Profile instance ${profileName} not found`); + } + // throw if dir is not exist + if (!fs.existsSync(directory)) { + throw new InternalError(`Directory ${directory} not found`); + } + const { options: connection } = this.poolMapping.get(profileName)!; + + const cannerAdapter = new CannerAdapter(connection); + try { + const presignedUrls = await cannerAdapter.createAsyncQueryResultUrls(sql); + await this.downloadFiles(presignedUrls, directory); + } catch (error: any) { + this.logger.debug('Failed to export data from canner', error); + throw error; + } + } + + private async downloadFiles(urls: string[], directory: string) { + await Promise.all( + urls.map(async (url: string, index: number) => { + const response = await axios.get(url, { + responseType: 'stream', + }); + const fileName = url.split('/').pop()?.split('?')[0] || `part${index}`; + const writeStream = fs.createWriteStream( + path.join(directory, fileName) + ); + response.data.pipe(writeStream); + return new Promise((resolve, reject) => { + writeStream.on('finish', resolve); + writeStream.on('error', reject); + }); + }) + ); + } + public async execute({ + statement: sql, + bindParams, + profileName, + operations, + }: ExecuteOptions): Promise<DataResult> { + if (!this.poolMapping.has(profileName)) { + throw new InternalError(`Profile instance ${profileName} not found`); + } + const { pool, options } = this.poolMapping.get(profileName)!; + this.logger.debug(`Acquiring connection from ${profileName}`); + const client =
await pool.connect(); + this.logger.debug(`Acquired connection from ${profileName}`); + try { + const builtSQL = buildSQL(sql, operations); + const cursor = client.query( + new Cursor(builtSQL, Array.from(bindParams.values())) + ); + cursor.once('done', async () => { + this.logger.debug( + `Data fetched, release connection from ${profileName}` + ); + // It is important to close the cursor before releasing connection, or the connection might not able to handle next request. + await cursor.close(); + client.release(); + }); + // All promises MUST fulfilled in this function or we are not able to release the connection when error occurred + return await this.getResultFromCursor(cursor, options); + } catch (e: any) { + this.logger.debug( + `Errors occurred, release connection from ${profileName}` + ); + client.release(); + throw e; + } + } + + public async prepare({ parameterIndex }: RequestParameter) { + return `$${parameterIndex}`; + } + + public async destroy() { + for (const { pool } of this.poolMapping.values()) { + await pool.end(); + } + } + + private async getResultFromCursor( + cursor: Cursor, + options: PGOptions = {} + ): Promise<DataResult> { + const { chunkSize = 100 } = options; + const cursorRead = this.cursorRead.bind(this); + const firstChunk = await cursorRead(cursor, chunkSize); + // save first chunk in buffer for incoming requests + let bufferedRows = [...firstChunk.rows]; + let bufferReadIndex = 0; + const fetchNext = async () => { + if (bufferReadIndex >= bufferedRows.length) { + bufferedRows = (await cursorRead(cursor, chunkSize)).rows; + bufferReadIndex = 0; + } + return bufferedRows[bufferReadIndex++] || null; + }; + const stream = new Readable({ + objectMode: true, + read() { + fetchNext() + .then((row) => { + this.push(row); + }) + .catch((error) => { + this.destroy(error); + }); + }, + destroy(error: Error | null, cb: (error: Error | null) => void) { + // Send done event to notify upstream to release the connection.
+ cursor.emit('done'); + cb(error); + }, + // automatically destroy() the stream when it emits 'finish' or errors. Node > 10.16 + autoDestroy: true, + }); + return { + getColumns: () => + firstChunk.result.fields.map((field) => ({ + name: field.name, + type: mapFromPGTypeId(field.dataTypeID), + })), + getData: () => stream, + }; + } + + public async cursorRead(cursor: Cursor, maxRows: number) { + return new Promise<{ rows: any[]; result: QueryResult }>( + (resolve, reject) => { + cursor.read(maxRows, (err, rows, result) => { + if (err) { + return reject(err); + } + resolve({ rows, result }); + }); + } + ); + } +} diff --git a/packages/extension-driver-canner/src/lib/sqlBuilder.ts b/packages/extension-driver-canner/src/lib/sqlBuilder.ts new file mode 100644 index 000000000..b5b94e951 --- /dev/null +++ b/packages/extension-driver-canner/src/lib/sqlBuilder.ts @@ -0,0 +1,40 @@ +import { Parameterized, SQLClauseOperation } from '@vulcan-sql/core'; +import { isNull, isUndefined } from 'lodash'; + +const isNullOrUndefine = (value: any) => isUndefined(value) || isNull(value); + +export const removeEndingSemiColon = (sql: string) => { + return sql.replace(/;([ \n]+)?$/, ''); +}; + +export const addLimit = (sql: string, limit?: string | null) => { + if (isNullOrUndefine(limit)) return sql; + return [sql, `LIMIT`, limit].join(' '); +}; + +export const addOffset = (sql: string, offset?: string | null) => { + if (isNullOrUndefine(offset)) return sql; + return [sql, `OFFSET`, offset].join(' '); +}; + +// Check if there is no operations +export const isNoOP = ( + operations: Partial<Parameterized<SQLClauseOperation>> +): boolean => { + if (!isNullOrUndefine(operations.limit)) return false; + if (!isNullOrUndefine(operations.offset)) return false; + return true; +}; + +export const buildSQL = ( + sql: string, + operations: Partial<Parameterized<SQLClauseOperation>> +): string => { + if (isNoOP(operations)) return sql; + let builtSQL = ''; + builtSQL += `SELECT * FROM (${removeEndingSemiColon(sql)})`; + builtSQL = addLimit(builtSQL,
operations.limit); + builtSQL = addOffset(builtSQL, operations.offset); + builtSQL += ';'; + return builtSQL; +}; diff --git a/packages/extension-driver-canner/src/lib/typeMapper.ts b/packages/extension-driver-canner/src/lib/typeMapper.ts new file mode 100644 index 000000000..d102d49a7 --- /dev/null +++ b/packages/extension-driver-canner/src/lib/typeMapper.ts @@ -0,0 +1,25 @@ +import { builtins, TypeId } from 'pg-types'; + +const typeMapping = new Map<TypeId, string>(); + +const register = (pgTypeId: TypeId, type: string) => { + typeMapping.set(pgTypeId, type); +}; + +// Reference +// https://github.com/brianc/node-pg-types/blob/master/lib/textParsers.js +// https://github.com/brianc/node-pg-types/blob/master/lib/binaryParsers.js + +register(builtins.INT8, 'number'); +register(builtins.INT4, 'number'); +register(builtins.INT2, 'number'); +register(builtins.OID, 'number'); +register(builtins.NUMERIC, 'number'); +register(builtins.FLOAT4, 'number'); // float4/real +register(builtins.FLOAT8, 'number'); // float8/double +register(builtins.BOOL, 'boolean'); + +export const mapFromPGTypeId = (pgTypeId: number) => { + if (typeMapping.has(pgTypeId)) return typeMapping.get(pgTypeId)!; + return 'string'; +}; diff --git a/packages/extension-driver-canner/test/cannerDataSource.spec.ts b/packages/extension-driver-canner/test/cannerDataSource.spec.ts new file mode 100644 index 000000000..7b3b1c289 --- /dev/null +++ b/packages/extension-driver-canner/test/cannerDataSource.spec.ts @@ -0,0 +1,334 @@ +import { CannerServer } from './cannerServer'; +import { CannerDataSource, PGOptions } from '../src'; +import { ExportOptions, streamToArray } from '@vulcan-sql/core'; +import { Readable, Writable } from 'stream'; +import * as sinon from 'ts-sinon'; +import * as fs from 'fs'; +import axios from 'axios'; +import { CannerAdapter } from '../src/lib/CannerAdapter'; + +const pg = new CannerServer(); +let dataSource: CannerDataSource; + +// restore all sinon mock/stub before each test +beforeEach(() => {
sinon.default.restore(); +}); + +it('Data source should be activate without any error when all profiles are valid', async () => { + // Arrange + dataSource = new CannerDataSource({}, '', [pg.getProfile('profile1')]); + // Act, Assert + await expect(dataSource.activate()).resolves.not.toThrow(); +}); + +it('Data source should throw error when activating if any profile is invalid', async () => { + // Arrange + const profile1 = pg.getProfile('profile1'); + dataSource = new CannerDataSource({}, '', [ + profile1, + { + name: 'wrong-password', + type: 'canner', + connection: { + ...profile1.connection, + password: 'wrong-password', + } as PGOptions, + allow: '*', + }, + ]); + // Act, Assert + await expect(dataSource.activate()).rejects.toThrow(); +}); + +// export method should be executed successfully +it('Data source should export successfully', async () => { + function createReadableStream(content: string): Readable { + const stream = new Readable(); + stream.push(content); + stream.push(null); // Signal the end of the stream + return stream; + } + // Arrange + sinon.default + .stub(CannerAdapter.prototype, 'createAsyncQueryResultUrls') + // eslint-disable-next-line @typescript-eslint/no-unused-vars + .callsFake(async (sql) => { + return ['https://example.com/file1.parquet?aws_key=mockKey']; + }); + sinon.default.stub(axios, 'get').callsFake(async () => { + return { data: createReadableStream('123') }; + }); + + fs.mkdirSync('tmp', { recursive: true }); + dataSource = new CannerDataSource({}, '', [pg.getProfile('profile1')]); + await dataSource.activate(); + + // Act, Assert + await expect( + dataSource.export({ + sql: 'select 1', + directory: 'tmp', + profileName: 'profile1', + } as ExportOptions) + ).resolves.not.toThrow(); + expect(fs.readdirSync('tmp').length).toBe(1); + + // clean up + fs.rmdirSync('tmp', { recursive: true }); +}, 100000); + +it('Data source should throw when fail to export data', async () => { + // Arrange + sinon.default + 
.stub(CannerAdapter.prototype, 'createAsyncQueryResultUrls') + // eslint-disable-next-line @typescript-eslint/no-unused-vars + .callsFake(async (sql) => { + throw new Error('mock error'); + }); + + fs.mkdirSync('tmp', { recursive: true }); + dataSource = new CannerDataSource({}, '', [pg.getProfile('profile1')]); + await dataSource.activate(); + + // Act, Assert + await expect( + dataSource.export({ + sql: 'select 1', + directory: 'tmp', + profileName: 'profile1', + } as ExportOptions) + ).rejects.toThrow(); + expect(fs.readdirSync('tmp').length).toBe(0); + + // clean up + fs.rmdirSync('tmp', { recursive: true }); +}, 100000); + +it('Data source should throw when given directory is not exist', async () => { + // Arrange + dataSource = new CannerDataSource({}, '', [pg.getProfile('profile1')]); + await dataSource.activate(); + + // Act, Assert + await expect( + dataSource.export({ + sql: 'select 1', + directory: 'tmp', + profileName: 'profile1', + } as ExportOptions) + ).rejects.toThrow(); +}, 100000); + +it('Data source should throw when given profile name is not exist', async () => { + // Arrange + dataSource = new CannerDataSource({}, '', [pg.getProfile('profile1')]); + await dataSource.activate(); + fs.mkdirSync('tmp', { recursive: true }); + + // Act, Assert + await expect( + dataSource.export({ + sql: 'select 1', + directory: 'tmp', + profileName: 'profile not exist', + } as ExportOptions) + ).rejects.toThrow(); +}, 100000); + +it('Data source should return correct rows with 1 chunks', async () => { + // Arrange + dataSource = new CannerDataSource({}, '', [pg.getProfile('profile1')]); + await dataSource.activate(); + // Act + const { getData } = await dataSource.execute({ + statement: "select 123 as A, 'str' as B, true as C", + bindParams: new Map(), + profileName: 'profile1', + operations: {} as any, + }); + const rows = await streamToArray(getData()); + // Assert + expect(rows.length).toBe(1); +}, 30000); + +it('Data source should return correct rows', async 
() => { + // Arrange + dataSource = new CannerDataSource({}, '', [pg.getProfile('profile1')]); + await dataSource.activate(); + // Act + const { getData } = await dataSource.execute({ + statement: "select 123 as A, 'str' as B, true as C", + bindParams: new Map(), + profileName: 'profile1', + operations: {} as any, + }); + const rows = await streamToArray(getData()); + // Assert + expect(rows.length).toBe(1); +}, 30000); + +it('Data source should return empty data with no row', async () => { + // Arrange + dataSource = new CannerDataSource({}, '', [pg.getProfile('profile1')]); + await dataSource.activate(); + // Act + const { getData } = await dataSource.execute({ + statement: 'select 1 limit 0', + bindParams: new Map(), + profileName: 'profile1', + operations: {} as any, + }); + const rows = await streamToArray(getData()); + // Assert + expect(rows.length).toBe(0); +}, 30000); + +it('Data source should release the connection when finished no matter success or not', async () => { + // Arrange + const profile1 = pg.getProfile('profile1'); + dataSource = new CannerDataSource({}, '', [ + { + name: 'profile1', + type: 'canner', + connection: { + ...profile1.connection, + max: 1, // Limit the pool size to 1, we'll get blocked with any leak. 
+ min: 1, + } as PGOptions, + allow: '*', + }, + ]); + await dataSource.activate(); + + // Act + // send parallel queries to test pool leak + const result = await Promise.all( + [ + async () => { + const { getData } = await dataSource.execute({ + statement: 'select 1', + bindParams: new Map(), + profileName: 'profile1', + operations: {} as any, + }); + return await streamToArray(getData()); + }, + async () => { + try { + const { getData } = await dataSource.execute({ + statement: 'select 1', + bindParams: new Map(), + profileName: 'profile1', + operations: {} as any, + }); + await streamToArray(getData()); + return [{}]; // fake data + } catch (error) { + // ignore error + return []; + } + }, + async () => { + const { getData } = await dataSource.execute({ + statement: 'select 1', + bindParams: new Map(), + profileName: 'profile1', + operations: {} as any, + }); + return await streamToArray(getData()); + }, + ].map((task) => task()) + ); + + // Assert + expect(result[0].length).toBe(1); + expect(result[1].length).toBe(1); + expect(result[2].length).toBe(1); +}, 60000); + +it('Data source should work with prepare statements', async () => { + // Arrange + dataSource = new CannerDataSource({}, '', [pg.getProfile('profile1')]); + await dataSource.activate(); + // Act + const bindParams = new Map(); + const var1Name = await dataSource.prepare({ + parameterIndex: 1, + value: '123', + profileName: 'profile1', + }); + bindParams.set(var1Name, '123'); + const var2Name = await dataSource.prepare({ + parameterIndex: 2, + value: '456', + profileName: 'profile1', + }); + bindParams.set(var2Name, '456'); + + const { getData } = await dataSource.execute({ + statement: `select ${var1Name} as v1, ${var2Name} as v2;`, + bindParams, + profileName: 'profile1', + operations: {} as any, + }); + const rows = await streamToArray(getData()); + // Assert + expect(rows[0].v1).toBe('123'); + expect(rows[0].v2).toBe('456'); +}, 30000); + +it('Data source should return correct column types', 
async () => { + // Arrange + dataSource = new CannerDataSource({}, '', [pg.getProfile('profile1')]); + await dataSource.activate(); + // Act + const { getColumns, getData } = await dataSource.execute({ + statement: "select 1 as id, 'name' as name, true as enabled limit 0", + bindParams: new Map(), + profileName: 'profile1', + operations: {} as any, + }); + const column = getColumns(); + // We need to destroy the data stream or the driver waits for us + const data = getData(); + data.destroy(); + + // Assert + expect(column[0]).toEqual({ name: 'id', type: 'number' }); + expect(column[1]).toEqual({ name: 'name', type: 'string' }); + expect(column[2]).toEqual({ name: 'enabled', type: 'boolean' }); +}, 30000); + +it('Data source should release connection when readable stream is destroyed', async () => { + // Arrange + dataSource = new CannerDataSource({}, '', [pg.getProfile('profile1')]); + await dataSource.activate(); + // Act + const { getData } = await dataSource.execute({ + statement: 'select 1', + bindParams: new Map(), + profileName: 'profile1', + operations: {} as any, + }); + const readStream = getData(); + const rows: any[] = []; + let resolve: any; + const waitForStream = () => new Promise((res) => (resolve = res)); + const writeStream = new Writable({ + write(chunk, _, cb) { + rows.push(chunk); + // After read 1 records, destroy the upstream + if (rows.length === 1) { + readStream.destroy(); + resolve(); + } else cb(); + }, + objectMode: true, + }); + readStream.pipe(writeStream); + await waitForStream(); + // Assert + expect(rows.length).toBe(1); + // afterEach hook will timeout if any leak occurred. 
+}, 300000); diff --git a/packages/extension-driver-canner/test/cannerServer.ts b/packages/extension-driver-canner/test/cannerServer.ts new file mode 100644 index 000000000..a24c2c8e5 --- /dev/null +++ b/packages/extension-driver-canner/test/cannerServer.ts @@ -0,0 +1,25 @@ +/* istanbul ignore file */ +import { PGOptions } from '../src/lib/cannerDataSource'; + +['CANNER_HOST', 'CANNER_PAT', 'CANNER_WORKSPACE_SQL_NAME'].forEach( + (envName) => { + /* istanbul ignore next */ + if (!process.env[envName]) throw new Error(`${envName} not defined`); + } +); +export class CannerServer { + public getProfile(name: string) { + return { + name, + type: 'canner', + connection: { + host: process.env['CANNER_HOST'], + port: process.env['CANNER_PORT'] || 7432, + user: process.env['CANNER_USER'] || 'canner', + password: process.env['CANNER_PAT'], + database: process.env['CANNER_WORKSPACE_SQL_NAME'], + } as PGOptions, + allow: '*', + }; + } +} diff --git a/packages/extension-driver-canner/test/sqlBuilder.spec.ts b/packages/extension-driver-canner/test/sqlBuilder.spec.ts new file mode 100644 index 000000000..5d0424d00 --- /dev/null +++ b/packages/extension-driver-canner/test/sqlBuilder.spec.ts @@ -0,0 +1,73 @@ +import * as builder from '../src/lib/sqlBuilder'; + +describe('SQL builders components test', () => { + it('removeEndingSemiColon', async () => { + // Arrange + const statement = `SELECT * FROM users; \n `; + // Act + const result = builder.removeEndingSemiColon(statement); + // Arrange + expect(result).toBe('SELECT * FROM users'); + }); + + it('addLimit - string value', async () => { + // Arrange + const statement = `SELECT * FROM users`; + // Act + const result = builder.addLimit(statement, '$1'); + // Arrange + expect(result).toBe('SELECT * FROM users LIMIT $1'); + }); + + it('addLimit - null value', async () => { + // Arrange + const statement = `SELECT * FROM users`; + // Act + const result = builder.addLimit(statement, null); + // Arrange + expect(result).toBe('SELECT * 
FROM users'); + }); + + it('addOffset - string value', async () => { + // Arrange + const statement = `SELECT * FROM users`; + // Act + const result = builder.addOffset(statement, '$1'); + // Arrange + expect(result).toBe('SELECT * FROM users OFFSET $1'); + }); + + it('addOffset - null value', async () => { + // Arrange + const statement = `SELECT * FROM users`; + // Act + const result = builder.addOffset(statement, null); + // Arrange + expect(result).toBe('SELECT * FROM users'); + }); + + it('isNoOP - empty operation', async () => { + // Act + const result = builder.isNoOP({}); + // Arrange + expect(result).toBe(true); + }); + + it('isNoOP - some operations', async () => { + // Act + const results = [{ limit: '$1' }, { offset: '$1' }].map(builder.isNoOP); + // Arrange + expect(results.every((result) => result === false)).toBeTruthy(); + }); +}); + +it('BuildSQL function should build sql with operations', async () => { + // Arrange + const statement = `SELECT * FROM users;`; + // Act + const result = builder.buildSQL(statement, { limit: '$1', offset: '$2' }); + // Arrange + expect(result).toBe( + 'SELECT * FROM (SELECT * FROM users) LIMIT $1 OFFSET $2;' + ); +}); diff --git a/packages/extension-driver-canner/tsconfig.json b/packages/extension-driver-canner/tsconfig.json new file mode 100644 index 000000000..f5b85657a --- /dev/null +++ b/packages/extension-driver-canner/tsconfig.json @@ -0,0 +1,22 @@ +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "module": "commonjs", + "forceConsistentCasingInFileNames": true, + "strict": true, + "noImplicitOverride": true, + "noPropertyAccessFromIndexSignature": true, + "noImplicitReturns": true, + "noFallthroughCasesInSwitch": true + }, + "files": [], + "include": [], + "references": [ + { + "path": "./tsconfig.lib.json" + }, + { + "path": "./tsconfig.spec.json" + } + ] +} diff --git a/packages/extension-driver-canner/tsconfig.lib.json b/packages/extension-driver-canner/tsconfig.lib.json new file mode 
100644 index 000000000..1925baa10 --- /dev/null +++ b/packages/extension-driver-canner/tsconfig.lib.json @@ -0,0 +1,10 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "outDir": "../../dist/out-tsc", + "declaration": true, + "types": [] + }, + "include": ["**/*.ts", "../../types/*.d.ts"], + "exclude": ["jest.config.ts", "**/*.spec.ts", "**/*.test.ts"] +} diff --git a/packages/extension-driver-canner/tsconfig.spec.json b/packages/extension-driver-canner/tsconfig.spec.json new file mode 100644 index 000000000..eb72f6351 --- /dev/null +++ b/packages/extension-driver-canner/tsconfig.spec.json @@ -0,0 +1,15 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "outDir": "../../dist/out-tsc", + "module": "commonjs", + "types": ["jest", "node"] + }, + "include": [ + "jest.config.ts", + "**/*.test.ts", + "**/*.spec.ts", + "**/*.d.ts", + "../../types/*.d.ts" + ] +} diff --git a/workspace.json b/workspace.json index 852e18d86..e37e0eb00 100644 --- a/workspace.json +++ b/workspace.json @@ -12,6 +12,7 @@ "extension-driver-duckdb": "packages/extension-driver-duckdb", "extension-driver-pg": "packages/extension-driver-pg", "extension-driver-snowflake": "packages/extension-driver-snowflake", + "extension-driver-canner": "packages/extension-driver-canner", "integration-testing": "packages/integration-testing", "serve": "packages/serve", "test-utility": "packages/test-utility"