From b21ecc89b56a42cd1914adf803404b75070b11de Mon Sep 17 00:00:00 2001
From: Star
Date: Wed, 2 Nov 2022 03:59:14 +0800
Subject: [PATCH 1/3] feat(ext-driver-bq): add bigquery datasource
---
package.json | 1 +
packages/extension-driver-bq/.eslintrc.json | 18 ++
packages/extension-driver-bq/README.md | 11 +
packages/extension-driver-bq/jest.config.ts | 14 +
packages/extension-driver-bq/package.json | 29 ++
packages/extension-driver-bq/project.json | 45 +++
packages/extension-driver-bq/src/index.ts | 3 +
.../src/lib/bqDataSource.ts | 165 ++++++++++
.../src/lib/bqlSqlBuilder.ts | 40 +++
.../extension-driver-bq/src/lib/typeMapper.ts | 51 +++
.../test/bqDataSource.spec.ts | 216 +++++++++++++
packages/extension-driver-bq/test/bqServer.ts | 25 ++
.../test/bqlSqlBuilder.spec.ts | 73 +++++
packages/extension-driver-bq/tsconfig.json | 22 ++
.../extension-driver-bq/tsconfig.lib.json | 10 +
.../extension-driver-bq/tsconfig.spec.json | 15 +
tsconfig.base.json | 3 +
workspace.json | 1 +
yarn.lock | 303 +++++++++++++++++-
19 files changed, 1041 insertions(+), 4 deletions(-)
create mode 100644 packages/extension-driver-bq/.eslintrc.json
create mode 100644 packages/extension-driver-bq/README.md
create mode 100644 packages/extension-driver-bq/jest.config.ts
create mode 100644 packages/extension-driver-bq/package.json
create mode 100644 packages/extension-driver-bq/project.json
create mode 100644 packages/extension-driver-bq/src/index.ts
create mode 100644 packages/extension-driver-bq/src/lib/bqDataSource.ts
create mode 100644 packages/extension-driver-bq/src/lib/bqlSqlBuilder.ts
create mode 100644 packages/extension-driver-bq/src/lib/typeMapper.ts
create mode 100644 packages/extension-driver-bq/test/bqDataSource.spec.ts
create mode 100644 packages/extension-driver-bq/test/bqServer.ts
create mode 100644 packages/extension-driver-bq/test/bqlSqlBuilder.spec.ts
create mode 100644 packages/extension-driver-bq/tsconfig.json
create mode 100644 packages/extension-driver-bq/tsconfig.lib.json
create mode 100644 packages/extension-driver-bq/tsconfig.spec.json
diff --git a/package.json b/package.json
index 8bfd62ba..a000dd4a 100644
--- a/package.json
+++ b/package.json
@@ -7,6 +7,7 @@
},
"private": true,
"dependencies": {
+ "@google-cloud/bigquery": "^6.0.3",
"@koa/cors": "^3.3.0",
"bcryptjs": "^2.4.3",
"bluebird": "^3.7.2",
diff --git a/packages/extension-driver-bq/.eslintrc.json b/packages/extension-driver-bq/.eslintrc.json
new file mode 100644
index 00000000..9d9c0db5
--- /dev/null
+++ b/packages/extension-driver-bq/.eslintrc.json
@@ -0,0 +1,18 @@
+{
+ "extends": ["../../.eslintrc.json"],
+ "ignorePatterns": ["!**/*"],
+ "overrides": [
+ {
+ "files": ["*.ts", "*.tsx", "*.js", "*.jsx"],
+ "rules": {}
+ },
+ {
+ "files": ["*.ts", "*.tsx"],
+ "rules": {}
+ },
+ {
+ "files": ["*.js", "*.jsx"],
+ "rules": {}
+ }
+ ]
+}
diff --git a/packages/extension-driver-bq/README.md b/packages/extension-driver-bq/README.md
new file mode 100644
index 00000000..5c69c847
--- /dev/null
+++ b/packages/extension-driver-bq/README.md
@@ -0,0 +1,11 @@
+# extension-driver-bq
+
+This library was generated with [Nx](https://nx.dev).
+
+## Building
+
+Run `nx build extension-driver-bq` to build the library.
+
+## Running unit tests
+
+Run `nx test extension-driver-bq` to execute the unit tests via [Jest](https://jestjs.io).
diff --git a/packages/extension-driver-bq/jest.config.ts b/packages/extension-driver-bq/jest.config.ts
new file mode 100644
index 00000000..c0cef74f
--- /dev/null
+++ b/packages/extension-driver-bq/jest.config.ts
@@ -0,0 +1,14 @@
+module.exports = {
+ displayName: 'extension-driver-bq',
+ preset: '../../jest.preset.ts',
+ globals: {
+ 'ts-jest': {
+ tsconfig: '<rootDir>/tsconfig.spec.json',
+ },
+ },
+ transform: {
+ '^.+\\.[tj]s$': 'ts-jest',
+ },
+ moduleFileExtensions: ['ts', 'js', 'html'],
+ coverageDirectory: '../../coverage/packages/extension-driver-bq',
+};
diff --git a/packages/extension-driver-bq/package.json b/packages/extension-driver-bq/package.json
new file mode 100644
index 00000000..f1827a64
--- /dev/null
+++ b/packages/extension-driver-bq/package.json
@@ -0,0 +1,29 @@
+{
+ "name": "@vulcan-sql/extension-driver-bq",
+ "description": "BigQuery driver for Vulcan SQL",
+ "version": "0.3.0",
+ "type": "commonjs",
+ "publishConfig": {
+ "access": "public"
+ },
+ "keywords": [
+ "vulcan",
+ "vulcan-sql",
+ "data",
+ "sql",
+ "database",
+ "data-warehouse",
+ "data-lake",
+ "api-builder",
+ "postgres",
+ "pg"
+ ],
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/Canner/vulcan.git"
+ },
+ "license": "MIT",
+ "peerDependencies": {
+ "@vulcan-sql/core": "~0.3.0-0"
+ }
+}
diff --git a/packages/extension-driver-bq/project.json b/packages/extension-driver-bq/project.json
new file mode 100644
index 00000000..7c974c25
--- /dev/null
+++ b/packages/extension-driver-bq/project.json
@@ -0,0 +1,45 @@
+{
+ "root": "packages/extension-driver-bq",
+ "sourceRoot": "packages/extension-driver-bq/src",
+ "targets": {
+ "build": {
+ "executor": "@nrwl/js:tsc",
+ "outputs": ["{options.outputPath}"],
+ "options": {
+ "outputPath": "dist/packages/extension-driver-bq",
+ "main": "packages/extension-driver-bq/src/index.ts",
+ "tsConfig": "packages/extension-driver-bq/tsconfig.lib.json",
+ "assets": ["packages/extension-driver-bq/*.md"]
+ }
+ },
+ "publish": {
+ "executor": "@nrwl/workspace:run-commands",
+ "options": {
+ "command": "node tools/scripts/publish.mjs extension-driver-bq {args.ver} {args.tag}",
+ "cwd": "dist/packages/extension-driver-bq"
+ },
+ "dependsOn": [
+ {
+ "projects": "self",
+ "target": "build"
+ }
+ ]
+ },
+ "lint": {
+ "executor": "@nrwl/linter:eslint",
+ "outputs": ["{options.outputFile}"],
+ "options": {
+ "lintFilePatterns": ["packages/extension-driver-bq/**/*.ts"]
+ }
+ },
+ "test": {
+ "executor": "@nrwl/jest:jest",
+ "outputs": ["coverage/packages/extension-driver-bq"],
+ "options": {
+ "jestConfig": "packages/extension-driver-bq/jest.config.ts",
+ "passWithNoTests": true
+ }
+ }
+ },
+ "tags": []
+}
diff --git a/packages/extension-driver-bq/src/index.ts b/packages/extension-driver-bq/src/index.ts
new file mode 100644
index 00000000..13c65f5d
--- /dev/null
+++ b/packages/extension-driver-bq/src/index.ts
@@ -0,0 +1,3 @@
+export * from './lib/bqDataSource';
+import { BQDataSource } from './lib/bqDataSource';
+export default [BQDataSource];
diff --git a/packages/extension-driver-bq/src/lib/bqDataSource.ts b/packages/extension-driver-bq/src/lib/bqDataSource.ts
new file mode 100644
index 00000000..60ac4a41
--- /dev/null
+++ b/packages/extension-driver-bq/src/lib/bqDataSource.ts
@@ -0,0 +1,165 @@
+import {
+ DataResult,
+ DataSource,
+ ExecuteOptions,
+ InternalError,
+ RequestParameter,
+ VulcanExtensionId,
+} from '@vulcan-sql/core';
+import { Readable } from 'stream';
+import { buildSQL } from './bqlSqlBuilder';
+import { mapFromBQTypeId } from './typeMapper';
+import { BigQuery, Query, Job, BigQueryOptions } from '@google-cloud/bigquery';
+import bigquery from '@google-cloud/bigquery/build/src/types';
+
+export interface BQOptions extends BigQueryOptions {
+ chunkSize?: number;
+ location?: string;
+}
+
+@VulcanExtensionId('bq')
+export class BQDataSource extends DataSource {
+ private logger = this.getLogger();
+ private bqMapping = new Map<string, { bq: BigQuery; options?: BQOptions }>();
+
+ public override async onActivate() {
+ const profiles = this.getProfiles().values();
+ for (const profile of profiles) {
+ this.logger.debug(
+ `Initializing profile: ${profile.name} using bq driver`
+ );
+ const bigqueryClient = new BigQuery(profile.connection);
+ // https://cloud.google.com/nodejs/docs/reference/bigquery/latest
+
+ this.bqMapping.set(profile.name, {
+ bq: bigqueryClient, options: profile.connection as BQOptions,
+ });
+
+ // Testing connection
+ await bigqueryClient.query('SELECT 1;');
+ this.logger.debug(`Profile ${profile.name} initialized`);
+ }
+ }
+
+ public async execute({
+ statement: sql,
+ bindParams,
+ profileName,
+ operations,
+ }: ExecuteOptions): Promise<DataResult> {
+ if (!this.bqMapping.has(profileName)) {
+ throw new InternalError(`Profile instance ${profileName} not found`);
+ }
+ const { bq: client, options } = this.bqMapping.get(profileName)!;
+ this.logger.debug(`Acquiring connection from ${profileName}`);
+
+
+ const params: Record<string, any> = {};
+ bindParams.forEach((value, key) => {
+ params[key.replace('@', '')] = value;
+ });
+
+ try {
+ const builtSQL = buildSQL(sql, operations);
+ const queryOptions = {
+ query: builtSQL,
+ location: options?.location || 'US',
+ params,
+ maxResults: options?.chunkSize || 100,
+ };
+
+ const [job] = await client.createQueryJob(queryOptions);
+
+ // All promises MUST fulfilled in this function or we are not able to release the connection when error occurred
+ return await this.getResultFromQueryJob(job, options);
+ } catch (e: any) {
+ this.logger.debug(
+ `Errors occurred, release connection from ${profileName}`
+ );
+ throw e;
+ }
+ }
+
+ public async prepare({ parameterIndex }: RequestParameter) {
+ return `@p${parameterIndex}`;
+ }
+
+ private async getResultFromQueryJob(
+ queryJob: Job,
+ options?: BQOptions
+ ): Promise<DataResult> {
+ const { chunkSize = 100 } = options || {};
+ const jobDataRead = this.jobDataRead.bind(this);
+ const firstChunk = await jobDataRead(queryJob, chunkSize);
+
+ // save first chunk in buffer for incoming requests
+ let bufferedRows = [...firstChunk.rows];
+ let bufferReadIndex = 0;
+ let nextQuery = firstChunk.nextQuery;
+
+ const fetchNext = async () => {
+ if (bufferReadIndex >= bufferedRows.length) {
+ if (nextQuery == null) return null;
+
+ const fetchData = await jobDataRead(queryJob, chunkSize, nextQuery);
+ bufferedRows = fetchData.rows;
+ nextQuery = fetchData.nextQuery;
+ bufferReadIndex = 0;
+ }
+ const res = bufferedRows[bufferReadIndex] || null;
+ bufferReadIndex += 1;
+ return res;
+ };
+
+ const stream = new Readable({
+ objectMode: true,
+ read() {
+ fetchNext()
+ .then((row) => {
+ this.push(row);
+ })
+ .catch((error) => {
+ this.destroy(error);
+ });
+ },
+ destroy(error: Error | null, cb: (error: Error | null) => void) {
+ // Send done event to notify upstream to release the connection.
+ cb(error);
+ },
+ // automatically destroy() the stream when it emits 'finish' or errors. Node > 10.16
+ autoDestroy: true,
+ });
+ return {
+ getColumns: () => {
+ const fields = firstChunk.apiResponse?.schema?.fields || [];
+ return fields.map((field) => ({
+ name: field.name || '',
+ type: mapFromBQTypeId(field.type || ''),
+ }));
+ },
+ getData: () => stream,
+ };
+ }
+
+ public async jobDataRead(
+ queryJob: Job,
+ chunkSize: number,
+ nextQuery?: Query | null | undefined
+ ) {
+ return new Promise<{
+ rows: any[];
+ nextQuery: Query | null | undefined;
+ apiResponse: bigquery.IGetQueryResultsResponse | null | undefined;
+ }>((resolve, reject) => {
+ return queryJob.getQueryResults(
+ nextQuery || { maxResults: chunkSize },
+ (err, rows, nextQuery, apiResponse) => {
+ if (err) {
+ return reject(err);
+ }
+ resolve({ rows: rows || [], nextQuery, apiResponse });
+ }
+ );
+ });
+ }
+}
diff --git a/packages/extension-driver-bq/src/lib/bqlSqlBuilder.ts b/packages/extension-driver-bq/src/lib/bqlSqlBuilder.ts
new file mode 100644
index 00000000..b5b94e95
--- /dev/null
+++ b/packages/extension-driver-bq/src/lib/bqlSqlBuilder.ts
@@ -0,0 +1,40 @@
+import { Parameterized, SQLClauseOperation } from '@vulcan-sql/core';
+import { isNull, isUndefined } from 'lodash';
+
+const isNullOrUndefine = (value: any) => isUndefined(value) || isNull(value);
+
+export const removeEndingSemiColon = (sql: string) => {
+ return sql.replace(/;([ \n]+)?$/, '');
+};
+
+export const addLimit = (sql: string, limit?: string | null) => {
+ if (isNullOrUndefine(limit)) return sql;
+ return [sql, `LIMIT`, limit].join(' ');
+};
+
+export const addOffset = (sql: string, offset?: string | null) => {
+ if (isNullOrUndefine(offset)) return sql;
+ return [sql, `OFFSET`, offset].join(' ');
+};
+
+// Check if there is no operations
+export const isNoOP = (
+ operations: Partial<SQLClauseOperation>
+): boolean => {
+ if (!isNullOrUndefine(operations.limit)) return false;
+ if (!isNullOrUndefine(operations.offset)) return false;
+ return true;
+};
+
+export const buildSQL = (
+ sql: string,
+ operations: Partial<SQLClauseOperation>
+): string => {
+ if (isNoOP(operations)) return sql;
+ let builtSQL = '';
+ builtSQL += `SELECT * FROM (${removeEndingSemiColon(sql)})`;
+ builtSQL = addLimit(builtSQL, operations.limit);
+ builtSQL = addOffset(builtSQL, operations.offset);
+ builtSQL += ';';
+ return builtSQL;
+};
diff --git a/packages/extension-driver-bq/src/lib/typeMapper.ts b/packages/extension-driver-bq/src/lib/typeMapper.ts
new file mode 100644
index 00000000..9f234260
--- /dev/null
+++ b/packages/extension-driver-bq/src/lib/typeMapper.ts
@@ -0,0 +1,51 @@
+const typeMapping = new Map<string, string>();
+
+const register = (bqType: string, type: string) => {
+ typeMapping.set(bqType, type);
+};
+
+// Reference
+// https://github.com/googleapis/nodejs-bigquery/blob/main/src/types.d.ts#L3598-L3601
+/**
+ * [Required] The field data type. Possible values include
+ * STRING,
+ * BYTES,
+ * INTEGER,
+ * INT64 (same as INTEGER),
+ * FLOAT,
+ * FLOAT64 (same as FLOAT),
+ * NUMERIC,
+ * BIGNUMERIC,
+ * BOOLEAN,
+ * BOOL (same as BOOLEAN),
+ * TIMESTAMP,
+ * DATE,
+ * TIME,
+ * DATETIME,
+ * INTERVAL,
+ * RECORD (where RECORD indicates that the field contains a nested schema) or
+ * STRUCT (same as RECORD).
+ */
+
+register('STRING', 'string');
+register('BYTES', 'string');
+register('INTEGER', 'number');
+register('INT64', 'number');
+register('FLOAT', 'number');
+register('FLOAT64', 'number');
+register('NUMERIC', 'number');
+register('BIGNUMERIC', 'number');
+register('BOOLEAN', 'boolean');
+register('BOOL', 'boolean');
+register('TIMESTAMP', 'string');
+register('DATE', 'string');
+register('TIME', 'string');
+register('DATETIME', 'string');
+register('INTERVAL', 'string');
+register('RECORD', 'string');
+register('STRUCT', 'string');
+
+export const mapFromBQTypeId = (bqType: string) => {
+ if (typeMapping.has(bqType)) return typeMapping.get(bqType)!;
+ return 'string';
+};
diff --git a/packages/extension-driver-bq/test/bqDataSource.spec.ts b/packages/extension-driver-bq/test/bqDataSource.spec.ts
new file mode 100644
index 00000000..d3942bac
--- /dev/null
+++ b/packages/extension-driver-bq/test/bqDataSource.spec.ts
@@ -0,0 +1,216 @@
+import { BQDataSource } from '../src';
+import { BQflakeServer } from './bqServer';
+import { streamToArray } from '@vulcan-sql/core';
+import { Writable } from 'stream';
+
+const bigQuery = new BQflakeServer();
+let dataSource: BQDataSource;
+
+const bqTable = `\`cannerflow-286003.bq_testing_tpch.orders\``;
+
+it('Data source should be activate without any error when all profiles are valid', async () => {
+ // Arrange
+ dataSource = new BQDataSource({}, '', [bigQuery.getProfile('profile1')]);
+
+ // Act, Assert
+ await expect(dataSource.activate()).resolves.not.toThrow();
+});
+
+it('Data source should throw error when activating if any profile is invalid', async () => {
+ // Arrange
+ const invalidProfile = bigQuery.getProfile('profile1');
+ // invalidProfile.connection.projectId = 'invalid';
+ invalidProfile.connection.credentials = {};
+ dataSource = new BQDataSource({}, '', [
+ bigQuery.getProfile('profile1'),
+ invalidProfile,
+ ]);
+
+ // Act, Assert
+ await expect(dataSource.activate()).rejects.toThrow();
+});
+
+it('Data source should return correct rows with 2 chunks', async () => {
+ // Arrange
+ dataSource = new BQDataSource({}, '', [bigQuery.getProfile('profile1')]);
+ await dataSource.activate();
+ // Act
+ const { getData } = await dataSource.execute({
+ statement: `select * from ${bqTable} limit 193`,
+ bindParams: new Map(),
+ profileName: 'profile1',
+ operations: {} as any,
+ });
+ const rows = await streamToArray(getData());
+ // Assert
+ expect(rows.length).toBe(193);
+}, 30000);
+
+it('Data source should return correct rows with 1 chunk', async () => {
+ // Arrange
+ dataSource = new BQDataSource({}, '', [bigQuery.getProfile('profile1')]);
+ await dataSource.activate();
+ // Act
+ const { getData } = await dataSource.execute({
+ statement: `select * from ${bqTable} limit 12`,
+ bindParams: new Map(),
+ profileName: 'profile1',
+ operations: {} as any,
+ });
+ const rows = await streamToArray(getData());
+ // Assert
+ expect(rows.length).toBe(12);
+}, 30000);
+
+it('Data source should return empty data with no row', async () => {
+ // Arrange
+ dataSource = new BQDataSource({}, '', [bigQuery.getProfile('profile1')]);
+ await dataSource.activate();
+ // Act
+ const { getData } = await dataSource.execute({
+ statement: `select * from ${bqTable} limit 0`,
+ bindParams: new Map(),
+ profileName: 'profile1',
+ operations: {} as any,
+ });
+ const rows = await streamToArray(getData());
+ // Assert
+ expect(rows.length).toBe(0);
+}, 30000);
+
+it('Data source should release the connection when finished no matter success or not', async () => {
+ // Arrange
+ dataSource = new BQDataSource({}, '', [bigQuery.getProfile('profile1')]);
+ await dataSource.activate();
+
+ // Act
+ // send parallel queries to test pool leak
+ const result = await Promise.all(
+ [
+ async () => {
+ const { getData } = await dataSource.execute({
+ statement: `select * from ${bqTable} limit 1`,
+ bindParams: new Map(),
+ profileName: 'profile1',
+ operations: {} as any,
+ });
+ return await streamToArray(getData());
+ },
+ async () => {
+ try {
+ const { getData } = await dataSource.execute({
+ statement: 'wrong sql',
+ bindParams: new Map(),
+ profileName: 'profile1',
+ operations: {} as any,
+ });
+ await streamToArray(getData());
+ return [{}]; // fake data
+ } catch {
+ // ignore error
+ return [];
+ }
+ },
+ async () => {
+ const { getData } = await dataSource.execute({
+ statement: `select * from ${bqTable} limit 1`,
+ bindParams: new Map(),
+ profileName: 'profile1',
+ operations: {} as any,
+ });
+ return await streamToArray(getData());
+ },
+ ].map((task) => task())
+ );
+
+ // Assert
+ expect(result[0].length).toBe(1);
+ expect(result[1].length).toBe(0);
+ expect(result[2].length).toBe(1);
+}, 30000);
+
+it('Data source should work with prepare statements', async () => {
+ // Arrange
+ dataSource = new BQDataSource({}, '', [bigQuery.getProfile('profile1')]);
+ await dataSource.activate();
+ // Act
+ const bindParams = new Map();
+ const var1Name = await dataSource.prepare({
+ parameterIndex: 1,
+ value: '123',
+ profileName: 'profile1',
+ });
+ bindParams.set(var1Name, '123');
+
+ const var2Name = await dataSource.prepare({
+ parameterIndex: 2,
+ value: '456',
+ profileName: 'profile1',
+ });
+ bindParams.set(var2Name, '456');
+
+ const { getData } = await dataSource.execute({
+ statement: `select ${var1Name} as v1, ${var2Name} as v2;`,
+ bindParams,
+ profileName: 'profile1',
+ operations: {} as any,
+ });
+ const rows = await streamToArray(getData());
+ // Assert
+ expect(rows[0].v1).toBe('123');
+ expect(rows[0].v2).toBe('456');
+}, 30000);
+
+it('Data source should return correct column types', async () => {
+ // Arrange
+ dataSource = new BQDataSource({}, '', [bigQuery.getProfile('profile1')]);
+ await dataSource.activate();
+ // Act
+ const { getColumns, getData } = await dataSource.execute({
+ statement: `select * from ${bqTable} limit 0`,
+ bindParams: new Map(),
+ profileName: 'profile1',
+ operations: {} as any,
+ });
+ const column = getColumns();
+ // We need to destroy the data stream or the driver waits for us
+ const data = getData();
+ data.destroy();
+
+ // Assert
+ expect(column[0]).toEqual({ name: 'orderkey', type: 'number' });
+ expect(column[2]).toEqual({ name: 'orderstatus', type: 'string' });
+}, 30000);
+
+it('Data source should release connection when readable stream is destroyed', async () => {
+ // Arrange
+ dataSource = new BQDataSource({}, '', [bigQuery.getProfile('profile1')]);
+ await dataSource.activate();
+ // Act
+ const { getData } = await dataSource.execute({
+ statement: `select * from ${bqTable} limit 100`,
+ bindParams: new Map(),
+ profileName: 'profile1',
+ operations: {} as any,
+ });
+ const readStream = getData();
+ const rows: any[] = [];
+ let resolve: any;
+ const waitForStream = () => new Promise((res) => (resolve = res));
+ const writeStream = new Writable({
+ write(chunk, _, cb) {
+ rows.push(chunk);
+ // After read 5 records, destroy the upstream
+ if (rows.length === 5) {
+ readStream.destroy();
+ resolve();
+ } else cb();
+ },
+ objectMode: true,
+ });
+ readStream.pipe(writeStream);
+ await waitForStream();
+ // Assert
+ expect(rows.length).toBe(5);
+ // afterEach hook will timeout if any leak occurred.
+}, 30000);
diff --git a/packages/extension-driver-bq/test/bqServer.ts b/packages/extension-driver-bq/test/bqServer.ts
new file mode 100644
index 00000000..dfd178f6
--- /dev/null
+++ b/packages/extension-driver-bq/test/bqServer.ts
@@ -0,0 +1,25 @@
+import { BigQueryOptions } from '@google-cloud/bigquery';
+
+['BQ_CLIENT_EMAIL', 'BQ_PRIVATE_KEY', 'BQ_LOCATION', 'BQ_PROJECT_ID'].forEach(
+ (envName) => {
+ /* istanbul ignore next */
+ if (!process.env[envName]) throw new Error(`${envName} not defined`);
+ }
+);
+export class BQflakeServer {
+ public getProfile(name: string) {
+ return {
+ name,
+ type: 'bq',
+ location: process.env['BQ_LOCATION'],
+ connection: {
+ projectId: process.env['BQ_PROJECT_ID'],
+ credentials: {
+ client_email: process.env['BQ_CLIENT_EMAIL'],
+ private_key: process.env['BQ_PRIVATE_KEY']?.replace(/\\n/g, '\n'),
+ },
+ } as BigQueryOptions,
+ allow: '*',
+ };
+ }
+}
diff --git a/packages/extension-driver-bq/test/bqlSqlBuilder.spec.ts b/packages/extension-driver-bq/test/bqlSqlBuilder.spec.ts
new file mode 100644
index 00000000..c2b3ec21
--- /dev/null
+++ b/packages/extension-driver-bq/test/bqlSqlBuilder.spec.ts
@@ -0,0 +1,73 @@
+import * as builder from '../src/lib/bqlSqlBuilder';
+
+describe('SQL builders components test', () => {
+ it('removeEndingSemiColon', async () => {
+ // Arrange
+ const statement = `SELECT * FROM users; \n `;
+ // Act
+ const result = builder.removeEndingSemiColon(statement);
+ // Arrange
+ expect(result).toBe('SELECT * FROM users');
+ });
+
+ it('addLimit - string value', async () => {
+ // Arrange
+ const statement = `SELECT * FROM users`;
+ // Act
+ const result = builder.addLimit(statement, '$1');
+ // Arrange
+ expect(result).toBe('SELECT * FROM users LIMIT $1');
+ });
+
+ it('addLimit - null value', async () => {
+ // Arrange
+ const statement = `SELECT * FROM users`;
+ // Act
+ const result = builder.addLimit(statement, null);
+ // Arrange
+ expect(result).toBe('SELECT * FROM users');
+ });
+
+ it('addOffset - string value', async () => {
+ // Arrange
+ const statement = `SELECT * FROM users`;
+ // Act
+ const result = builder.addOffset(statement, '$1');
+ // Arrange
+ expect(result).toBe('SELECT * FROM users OFFSET $1');
+ });
+
+ it('addOffset - null value', async () => {
+ // Arrange
+ const statement = `SELECT * FROM users`;
+ // Act
+ const result = builder.addOffset(statement, null);
+ // Arrange
+ expect(result).toBe('SELECT * FROM users');
+ });
+
+ it('isNoOP - empty operation', async () => {
+ // Act
+ const result = builder.isNoOP({});
+ // Arrange
+ expect(result).toBe(true);
+ });
+
+ it('isNoOP - some operations', async () => {
+ // Act
+ const results = [{ limit: '$1' }, { offset: '$1' }].map(builder.isNoOP);
+ // Arrange
+ expect(results.every((result) => result === false)).toBeTruthy();
+ });
+});
+
+it('BuildSQL function should build sql with operations', async () => {
+ // Arrange
+ const statement = `SELECT * FROM users;`;
+ // Act
+ const result = builder.buildSQL(statement, { limit: '$1', offset: '$2' });
+ // Arrange
+ expect(result).toBe(
+ 'SELECT * FROM (SELECT * FROM users) LIMIT $1 OFFSET $2;'
+ );
+});
diff --git a/packages/extension-driver-bq/tsconfig.json b/packages/extension-driver-bq/tsconfig.json
new file mode 100644
index 00000000..f5b85657
--- /dev/null
+++ b/packages/extension-driver-bq/tsconfig.json
@@ -0,0 +1,22 @@
+{
+ "extends": "../../tsconfig.base.json",
+ "compilerOptions": {
+ "module": "commonjs",
+ "forceConsistentCasingInFileNames": true,
+ "strict": true,
+ "noImplicitOverride": true,
+ "noPropertyAccessFromIndexSignature": true,
+ "noImplicitReturns": true,
+ "noFallthroughCasesInSwitch": true
+ },
+ "files": [],
+ "include": [],
+ "references": [
+ {
+ "path": "./tsconfig.lib.json"
+ },
+ {
+ "path": "./tsconfig.spec.json"
+ }
+ ]
+}
diff --git a/packages/extension-driver-bq/tsconfig.lib.json b/packages/extension-driver-bq/tsconfig.lib.json
new file mode 100644
index 00000000..1925baa1
--- /dev/null
+++ b/packages/extension-driver-bq/tsconfig.lib.json
@@ -0,0 +1,10 @@
+{
+ "extends": "./tsconfig.json",
+ "compilerOptions": {
+ "outDir": "../../dist/out-tsc",
+ "declaration": true,
+ "types": []
+ },
+ "include": ["**/*.ts", "../../types/*.d.ts"],
+ "exclude": ["jest.config.ts", "**/*.spec.ts", "**/*.test.ts"]
+}
diff --git a/packages/extension-driver-bq/tsconfig.spec.json b/packages/extension-driver-bq/tsconfig.spec.json
new file mode 100644
index 00000000..eb72f635
--- /dev/null
+++ b/packages/extension-driver-bq/tsconfig.spec.json
@@ -0,0 +1,15 @@
+{
+ "extends": "./tsconfig.json",
+ "compilerOptions": {
+ "outDir": "../../dist/out-tsc",
+ "module": "commonjs",
+ "types": ["jest", "node"]
+ },
+ "include": [
+ "jest.config.ts",
+ "**/*.test.ts",
+ "**/*.spec.ts",
+ "**/*.d.ts",
+ "../../types/*.d.ts"
+ ]
+}
diff --git a/tsconfig.base.json b/tsconfig.base.json
index 01d1558f..2903cc39 100644
--- a/tsconfig.base.json
+++ b/tsconfig.base.json
@@ -69,6 +69,9 @@
"@vulcan-sql/extension-debug-tools": [
"packages/extension-debug-tools/src/index.ts"
],
+ "@vulcan-sql/extension-driver-bq": [
+ "packages/extension-driver-bq/src/index.ts"
+ ],
"@vulcan-sql/extension-driver-duckdb": [
"packages/extension-driver-duckdb/src/index.ts"
],
diff --git a/workspace.json b/workspace.json
index bc98182c..d0f5d2b2 100644
--- a/workspace.json
+++ b/workspace.json
@@ -7,6 +7,7 @@
"doc": "packages/doc",
"extension-dbt": "packages/extension-dbt",
"extension-debug-tools": "packages/extension-debug-tools",
+ "extension-driver-bq": "packages/extension-driver-bq",
"extension-driver-duckdb": "packages/extension-driver-duckdb",
"extension-driver-pg": "packages/extension-driver-pg",
"extension-driver-snowflake": "packages/extension-driver-snowflake",
diff --git a/yarn.lock b/yarn.lock
index b82e4a02..28f134ed 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -379,6 +379,57 @@
resolved "https://registry.yarnpkg.com/@faker-js/faker/-/faker-6.3.1.tgz#1ae963dd40405450a2945408cba553e1afa3e0fb"
integrity sha512-8YXBE2ZcU/pImVOHX7MWrSR/X5up7t6rPWZlk34RwZEcdr3ua6X+32pSd6XuOQRN+vbuvYNfA6iey8NbrjuMFQ==
+"@google-cloud/bigquery@^6.0.3":
+ version "6.0.3"
+ resolved "https://registry.yarnpkg.com/@google-cloud/bigquery/-/bigquery-6.0.3.tgz#4b02ee988e048116dad9c6692ac428d86f89ee92"
+ integrity sha512-BP464228S9dqDCb4dR99h9D8+N498YZi/AZvoOJUaieg2H6qbiYBE1xlYuaMvyV1WEQT/2/yZTCJnCo5WiaY0Q==
+ dependencies:
+ "@google-cloud/common" "^4.0.0"
+ "@google-cloud/paginator" "^4.0.0"
+ "@google-cloud/promisify" "^3.0.0"
+ arrify "^2.0.1"
+ big.js "^6.0.0"
+ duplexify "^4.0.0"
+ extend "^3.0.2"
+ is "^3.3.0"
+ p-event "^4.1.0"
+ readable-stream "^4.0.0"
+ stream-events "^1.0.5"
+ uuid "^8.0.0"
+
+"@google-cloud/common@^4.0.0":
+ version "4.0.3"
+ resolved "https://registry.yarnpkg.com/@google-cloud/common/-/common-4.0.3.tgz#d4324ac83087385d727593f7e1b6d81ee66442cf"
+ integrity sha512-fUoMo5b8iAKbrYpneIRV3z95AlxVJPrjpevxs4SKoclngWZvTXBSGpNisF5+x5m+oNGve7jfB1e6vNBZBUs7Fw==
+ dependencies:
+ "@google-cloud/projectify" "^3.0.0"
+ "@google-cloud/promisify" "^3.0.0"
+ arrify "^2.0.1"
+ duplexify "^4.1.1"
+ ent "^2.2.0"
+ extend "^3.0.2"
+ google-auth-library "^8.0.2"
+ retry-request "^5.0.0"
+ teeny-request "^8.0.0"
+
+"@google-cloud/paginator@^4.0.0":
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/@google-cloud/paginator/-/paginator-4.0.1.tgz#5fb8793d4f84d18c50a6f2fad3dadab8d2c533ef"
+ integrity sha512-6G1ui6bWhNyHjmbYwavdN7mpVPRBtyDg/bfqBTAlwr413On2TnFNfDxc9UhTJctkgoCDgQXEKiRPLPR9USlkbQ==
+ dependencies:
+ arrify "^2.0.0"
+ extend "^3.0.2"
+
+"@google-cloud/projectify@^3.0.0":
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/@google-cloud/projectify/-/projectify-3.0.0.tgz#302b25f55f674854dce65c2532d98919b118a408"
+ integrity sha512-HRkZsNmjScY6Li8/kb70wjGlDDyLkVk3KvoEo9uIoxSjYLJasGiCch9+PqRVDOCGUFvEIqyogl+BeqILL4OJHA==
+
+"@google-cloud/promisify@^3.0.0":
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/@google-cloud/promisify/-/promisify-3.0.1.tgz#8d724fb280f47d1ff99953aee0c1669b25238c2e"
+ integrity sha512-z1CjRjtQyBOYL+5Qr9DdYIfrdLBe746jRTYfaYU6MeXkqp7UfYs/jX16lFFVzZ7PGEJvqZNqYUEtb1mvDww4pA==
+
"@hapi/hoek@^9.0.0":
version "9.3.0"
resolved "https://registry.yarnpkg.com/@hapi/hoek/-/hoek-9.3.0.tgz#8368869dcb735be2e7f5cb7647de78e167a251fb"
@@ -1019,6 +1070,11 @@
resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82"
integrity sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==
+"@tootallnate/once@2":
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-2.0.0.tgz#f544a148d3ab35801c1f633a7441fd87c2e484bf"
+ integrity sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==
+
"@tsconfig/node10@^1.0.7":
version "1.0.9"
resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.9.tgz#df4907fc07a886922637b15e02d4cebc4c0021b2"
@@ -1636,6 +1692,13 @@ abbrev@1:
resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8"
integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==
+abort-controller@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/abort-controller/-/abort-controller-3.0.0.tgz#eaf54d53b62bae4138e809ca225c8439a6efb392"
+ integrity sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==
+ dependencies:
+ event-target-shim "^5.0.0"
+
accepts@^1.3.5:
version "1.3.8"
resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e"
@@ -1836,6 +1899,11 @@ array-unique@^0.3.2:
resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428"
integrity sha512-SleRWjh9JUud2wH1hPs9rZBZ33H6T9HOiL0uwGnGx9FpE6wKGyfWugmbkEOIs6qWrZhg0LWeLziLrEwQJhs5mQ==
+arrify@^2.0.0, arrify@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/arrify/-/arrify-2.0.1.tgz#c9655e9331e0abcd588d2a7cad7e9956f66701fa"
+ integrity sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==
+
asap@^2.0.0, asap@^2.0.3:
version "2.0.6"
resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46"
@@ -1954,7 +2022,7 @@ balanced-match@^1.0.0:
resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee"
integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==
-base64-js@^1.3.1:
+base64-js@^1.3.0, base64-js@^1.3.1:
version "1.5.1"
resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a"
integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==
@@ -1984,6 +2052,16 @@ bcryptjs@^2.4.3:
resolved "https://registry.yarnpkg.com/bcryptjs/-/bcryptjs-2.4.3.tgz#9ab5627b93e60621ff7cdac5da9733027df1d0cb"
integrity sha512-V/Hy/X9Vt7f3BbPJEi8BdVFMByHi+jNXrYkW3huaybV/kQ0KJg0Y6PkEMbn+zeT+i+SiKZ/HMqJGIIt4LZDqNQ==
+big.js@^6.0.0:
+ version "6.2.1"
+ resolved "https://registry.yarnpkg.com/big.js/-/big.js-6.2.1.tgz#7205ce763efb17c2e41f26f121c420c6a7c2744f"
+ integrity sha512-bCtHMwL9LeDIozFn+oNhhFoq+yQ3BNdnsLSASUxLciOb1vgvpHsIO1dsENiGMgbb4SkP5TrzWzRiLddn8ahVOQ==
+
+bignumber.js@^9.0.0:
+ version "9.1.0"
+ resolved "https://registry.yarnpkg.com/bignumber.js/-/bignumber.js-9.1.0.tgz#8d340146107fe3a6cb8d40699643c302e8773b62"
+ integrity sha512-4LwHK4nfDOraBCtst+wOWIHbu1vhvAPJK8g8nROd4iuc3PSEjWif/qwbkh8jwCJz6yDBvtU4KPynETgrfh7y3A==
+
binary-extensions@^2.0.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d"
@@ -2078,6 +2156,11 @@ bson@^4.6.5:
dependencies:
buffer "^5.6.0"
+buffer-equal-constant-time@1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz#f8e71132f7ffe6e01a5c9697a4c6f3e48d5cc819"
+ integrity sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==
+
buffer-from@^1.0.0:
version "1.1.2"
resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5"
@@ -2096,6 +2179,14 @@ buffer@^5.5.0, buffer@^5.6.0:
base64-js "^1.3.1"
ieee754 "^1.1.13"
+buffer@^6.0.3:
+ version "6.0.3"
+ resolved "https://registry.yarnpkg.com/buffer/-/buffer-6.0.3.tgz#2ace578459cc8fbe2a70aaa8f52ee63b6a74c6c6"
+ integrity sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==
+ dependencies:
+ base64-js "^1.3.1"
+ ieee754 "^1.2.1"
+
buildcheck@0.0.3:
version "0.0.3"
resolved "https://registry.yarnpkg.com/buildcheck/-/buildcheck-0.0.3.tgz#70451897a95d80f7807e68fc412eb2e7e35ff4d5"
@@ -2842,6 +2933,16 @@ duckdb@0.5.1:
node-addon-api "*"
node-gyp "^7.1.2"
+duplexify@^4.0.0, duplexify@^4.1.1:
+ version "4.1.2"
+ resolved "https://registry.yarnpkg.com/duplexify/-/duplexify-4.1.2.tgz#18b4f8d28289132fa0b9573c898d9f903f81c7b0"
+ integrity sha512-fz3OjcNCHmRP12MJoZMPglx8m4rrFP8rovnk4vT8Fs+aonZoCwGg10dSsQsfP/E62eZcPTMSMP6686fu9Qlqtw==
+ dependencies:
+ end-of-stream "^1.4.1"
+ inherits "^2.0.3"
+ readable-stream "^3.1.1"
+ stream-shift "^1.0.0"
+
ecc-jsbn@~0.1.1:
version "0.1.2"
resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9"
@@ -2850,6 +2951,13 @@ ecc-jsbn@~0.1.1:
jsbn "~0.1.0"
safer-buffer "^2.1.0"
+ecdsa-sig-formatter@1.0.11, ecdsa-sig-formatter@^1.0.11:
+ version "1.0.11"
+ resolved "https://registry.yarnpkg.com/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz#ae0f0fa2d85045ef14a817daa3ce9acd0489e5bf"
+ integrity sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==
+ dependencies:
+ safe-buffer "^5.0.1"
+
ee-first@1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d"
@@ -2896,6 +3004,11 @@ enquirer@~2.3.6:
dependencies:
ansi-colors "^4.1.1"
+ent@^2.2.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/ent/-/ent-2.2.0.tgz#e964219325a21d05f44466a2f686ed6ce5f5dd1d"
+ integrity sha512-GHrMyVZQWvTIdDtpiEXdHZnFQKzeO09apj8Cbl4pKWy4i0Oprcq17usfDt5aO63swf0JOeMWjWQE/LzgSRuWpA==
+
env-paths@^2.2.0:
version "2.2.1"
resolved "https://registry.yarnpkg.com/env-paths/-/env-paths-2.2.1.tgz#420399d416ce1fbe9bc0a07c62fa68d67fd0f8f2"
@@ -3082,11 +3195,21 @@ esutils@^2.0.2:
resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64"
integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==
+event-target-shim@^5.0.0:
+ version "5.0.1"
+ resolved "https://registry.yarnpkg.com/event-target-shim/-/event-target-shim-5.0.1.tgz#5d4d3ebdf9583d63a5333ce2deb7480ab2b05789"
+ integrity sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==
+
eventemitter3@^4.0.7:
version "4.0.7"
resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f"
integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==
+events@^3.3.0:
+ version "3.3.0"
+ resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400"
+ integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==
+
execa@^5.0.0:
version "5.1.1"
resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd"
@@ -3152,7 +3275,7 @@ extend-shallow@^3.0.0, extend-shallow@^3.0.2:
assign-symbols "^1.0.0"
is-extendable "^1.0.1"
-extend@^3.0.0, extend@~3.0.2:
+extend@^3.0.0, extend@^3.0.2, extend@~3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa"
integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==
@@ -3232,6 +3355,11 @@ fast-safe-stringify@^2.0.7, fast-safe-stringify@^2.1.1:
resolved "https://registry.yarnpkg.com/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz#c406a83b6e70d9e35ce3b30a81141df30aeba884"
integrity sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==
+fast-text-encoding@^1.0.0:
+ version "1.0.6"
+ resolved "https://registry.yarnpkg.com/fast-text-encoding/-/fast-text-encoding-1.0.6.tgz#0aa25f7f638222e3396d72bf936afcf1d42d6867"
+ integrity sha512-VhXlQgj9ioXCqGstD37E/HBeqEGV/qOD/kmbVG8h5xKBYvM1L3lR1Zn4555cQ8GkYbJa8aJSipLPndE1k6zK2w==
+
fastq@^1.6.0:
version "1.13.0"
resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.13.0.tgz#616760f88a7526bdfc596b7cab8c18938c36b98c"
@@ -3509,6 +3637,24 @@ gauge@~2.7.3:
strip-ansi "^3.0.1"
wide-align "^1.1.0"
+gaxios@^5.0.0, gaxios@^5.0.1:
+ version "5.0.2"
+ resolved "https://registry.yarnpkg.com/gaxios/-/gaxios-5.0.2.tgz#ca3a40e851c728d31d7001c2357062d46bf966d1"
+ integrity sha512-TjtV2AJOZoMQqRYoy5eM8cCQogYwazWNYLQ72QB0kwa6vHHruYkGmhhyrlzbmgNHK1dNnuP2WSH81urfzyN2Og==
+ dependencies:
+ extend "^3.0.2"
+ https-proxy-agent "^5.0.0"
+ is-stream "^2.0.0"
+ node-fetch "^2.6.7"
+
+gcp-metadata@^5.0.0:
+ version "5.0.1"
+ resolved "https://registry.yarnpkg.com/gcp-metadata/-/gcp-metadata-5.0.1.tgz#8d1e785ee7fad554bc2a80c1f930c9a9518d2b00"
+ integrity sha512-jiRJ+Fk7e8FH68Z6TLaqwea307OktJpDjmYnU7/li6ziwvVvU2RlrCyQo5vkdeP94chm0kcSCOOszvmuaioq3g==
+ dependencies:
+ gaxios "^5.0.0"
+ json-bigint "^1.0.0"
+
gensync@^1.0.0-beta.2:
version "1.0.0-beta.2"
resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0"
@@ -3680,11 +3826,42 @@ globby@^11.0.4:
merge2 "^1.4.1"
slash "^3.0.0"
+google-auth-library@^8.0.2:
+ version "8.6.0"
+ resolved "https://registry.yarnpkg.com/google-auth-library/-/google-auth-library-8.6.0.tgz#79cc4c8bacffee26bac244f25f4968ac87218bb8"
+ integrity sha512-y6bw1yTWMVgs1vGJwBZ3uu+uIClfgxQfsEVcTNKjQeNQOVwox69+ZUgTeTAzrh+74hBqrk1gWyb9RsQVDI7seg==
+ dependencies:
+ arrify "^2.0.0"
+ base64-js "^1.3.0"
+ ecdsa-sig-formatter "^1.0.11"
+ fast-text-encoding "^1.0.0"
+ gaxios "^5.0.0"
+ gcp-metadata "^5.0.0"
+ gtoken "^6.1.0"
+ jws "^4.0.0"
+ lru-cache "^6.0.0"
+
+google-p12-pem@^4.0.0:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/google-p12-pem/-/google-p12-pem-4.0.1.tgz#82841798253c65b7dc2a4e5fe9df141db670172a"
+ integrity sha512-WPkN4yGtz05WZ5EhtlxNDWPhC4JIic6G8ePitwUWy4l+XPVYec+a0j0Ts47PDtW59y3RwAhUd9/h9ZZ63px6RQ==
+ dependencies:
+ node-forge "^1.3.1"
+
graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.3, graceful-fs@^4.2.9:
version "4.2.10"
resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c"
integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==
+gtoken@^6.1.0:
+ version "6.1.2"
+ resolved "https://registry.yarnpkg.com/gtoken/-/gtoken-6.1.2.tgz#aeb7bdb019ff4c3ba3ac100bbe7b6e74dce0e8bc"
+ integrity sha512-4ccGpzz7YAr7lxrT2neugmXQ3hP9ho2gcaityLVkiUecAiwiy60Ii8gRbZeOsXV19fYaRjgBSshs8kXw+NKCPQ==
+ dependencies:
+ gaxios "^5.0.1"
+ google-p12-pem "^4.0.0"
+ jws "^4.0.0"
+
har-schema@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92"
@@ -3831,6 +4008,15 @@ http-proxy-agent@^4.0.1:
agent-base "6"
debug "4"
+http-proxy-agent@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz#5129800203520d434f142bc78ff3c170800f2b43"
+ integrity sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==
+ dependencies:
+ "@tootallnate/once" "2"
+ agent-base "6"
+ debug "4"
+
http-signature@~1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1"
@@ -3872,7 +4058,7 @@ identity-obj-proxy@3.0.0:
dependencies:
harmony-reflect "^1.4.6"
-ieee754@^1.1.13:
+ieee754@^1.1.13, ieee754@^1.2.1:
version "1.2.1"
resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352"
integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==
@@ -4180,6 +4366,11 @@ is-wsl@^2.2.0:
dependencies:
is-docker "^2.0.0"
+is@^3.3.0:
+ version "3.3.0"
+ resolved "https://registry.yarnpkg.com/is/-/is-3.3.0.tgz#61cff6dd3c4193db94a3d62582072b44e5645d79"
+ integrity sha512-nW24QBoPcFGGHJGUwnfpI7Yc5CdqWNdsyHQszVE/z2pKHXzh7FZ5GWhJqSyaQ9wMkQnsTx+kAI8bHlCX4tKdbg==
+
isarray@0.0.1:
version "0.0.1"
resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf"
@@ -4748,6 +4939,13 @@ jsesc@^2.5.1:
resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4"
integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==
+json-bigint@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/json-bigint/-/json-bigint-1.0.0.tgz#ae547823ac0cad8398667f8cd9ef4730f5b01ff1"
+ integrity sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==
+ dependencies:
+ bignumber.js "^9.0.0"
+
json-parse-even-better-errors@^2.3.0:
version "2.3.1"
resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d"
@@ -4838,6 +5036,23 @@ just-extend@^4.0.2:
resolved "https://registry.yarnpkg.com/just-extend/-/just-extend-4.2.1.tgz#ef5e589afb61e5d66b24eca749409a8939a8c744"
integrity sha512-g3UB796vUFIY90VIv/WX3L2c8CS2MdWUww3CNrYmqza1Fg0DURc2K/O4YrnklBdQarSJ/y8JnJYDGc+1iumQjg==
+jwa@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/jwa/-/jwa-2.0.0.tgz#a7e9c3f29dae94027ebcaf49975c9345593410fc"
+ integrity sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==
+ dependencies:
+ buffer-equal-constant-time "1.0.1"
+ ecdsa-sig-formatter "1.0.11"
+ safe-buffer "^5.0.1"
+
+jws@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/jws/-/jws-4.0.0.tgz#2d4e8cf6a318ffaa12615e9dec7e86e6c97310f4"
+ integrity sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==
+ dependencies:
+ jwa "^2.0.0"
+ safe-buffer "^5.0.1"
+
kareem@2.4.1:
version "2.4.1"
resolved "https://registry.yarnpkg.com/kareem/-/kareem-2.4.1.tgz#7d81ec518204a48c1cb16554af126806c3cd82b0"
@@ -5441,6 +5656,11 @@ node-fetch@^2.6.1, node-fetch@^2.6.7:
dependencies:
whatwg-url "^5.0.0"
+node-forge@^1.3.1:
+ version "1.3.1"
+ resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3"
+ integrity sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==
+
node-gyp-build@^4.3.0:
version "4.4.0"
resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.4.0.tgz#42e99687ce87ddeaf3a10b99dc06abc11021f3f4"
@@ -5777,6 +5997,18 @@ os-tmpdir@~1.0.2:
resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274"
integrity sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==
+p-event@^4.1.0:
+ version "4.2.0"
+ resolved "https://registry.yarnpkg.com/p-event/-/p-event-4.2.0.tgz#af4b049c8acd91ae81083ebd1e6f5cae2044c1b5"
+ integrity sha512-KXatOjCRXXkSePPb1Nbi0p0m+gQAwdlbhi4wQKJPI1HsMQS9g+Sqp2o+QHziPr7eYJyOZet836KoHEVM1mwOrQ==
+ dependencies:
+ p-timeout "^3.1.0"
+
+p-finally@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae"
+ integrity sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==
+
p-limit@^2.2.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1"
@@ -5791,6 +6023,13 @@ p-locate@^4.1.0:
dependencies:
p-limit "^2.2.0"
+p-timeout@^3.1.0:
+ version "3.2.0"
+ resolved "https://registry.yarnpkg.com/p-timeout/-/p-timeout-3.2.0.tgz#c7e17abc971d2a7962ef83626b35d635acf23dfe"
+ integrity sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg==
+ dependencies:
+ p-finally "^1.0.0"
+
p-try@^2.0.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6"
@@ -6108,6 +6347,11 @@ process-nextick-args@~2.0.0:
resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2"
integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==
+process@^0.11.10:
+ version "0.11.10"
+ resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182"
+ integrity sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==
+
prompts@^2.0.1:
version "2.4.2"
resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069"
@@ -6233,6 +6477,16 @@ readable-stream@^3.1.1, readable-stream@^3.4.0, readable-stream@^3.5.0, readable
string_decoder "^1.1.1"
util-deprecate "^1.0.1"
+readable-stream@^4.0.0:
+ version "4.2.0"
+ resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-4.2.0.tgz#a7ef523d3b39e4962b0db1a1af22777b10eeca46"
+ integrity sha512-gJrBHsaI3lgBoGMW/jHZsQ/o/TIWiu5ENCJG1BB7fuCKzpFM8GaS2UoBVt9NO+oI+3FcrBNbUkl3ilDe09aY4A==
+ dependencies:
+ abort-controller "^3.0.0"
+ buffer "^6.0.3"
+ events "^3.3.0"
+ process "^0.11.10"
+
readdirp@~3.6.0:
version "3.6.0"
resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7"
@@ -6422,6 +6676,14 @@ ret@~0.1.10:
resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc"
integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==
+retry-request@^5.0.0:
+ version "5.0.2"
+ resolved "https://registry.yarnpkg.com/retry-request/-/retry-request-5.0.2.tgz#143d85f90c755af407fcc46b7166a4ba520e44da"
+ integrity sha512-wfI3pk7EE80lCIXprqh7ym48IHYdwmAAzESdbU8Q9l7pnRCk9LEhpbOTNKjz6FARLm/Bl5m+4F0ABxOkYUujSQ==
+ dependencies:
+ debug "^4.1.1"
+ extend "^3.0.2"
+
reusify@^1.0.4:
version "1.0.4"
resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76"
@@ -6822,6 +7084,18 @@ stickyfill@^1.1.1:
resolved "https://registry.yarnpkg.com/stickyfill/-/stickyfill-1.1.1.tgz#39413fee9d025c74a7e59ceecb23784cc0f17f02"
integrity sha512-GCp7vHAfpao+Qh/3Flh9DXEJ/qSi0KJwJw6zYlZOtRYXWUIpMM6mC2rIep/dK8RQqwW0KxGJIllmjPIBOGN8AA==
+stream-events@^1.0.5:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/stream-events/-/stream-events-1.0.5.tgz#bbc898ec4df33a4902d892333d47da9bf1c406d5"
+ integrity sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg==
+ dependencies:
+ stubs "^3.0.0"
+
+stream-shift@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.1.tgz#d7088281559ab2778424279b0877da3c392d5a3d"
+ integrity sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ==
+
string-length@^4.0.1:
version "4.0.2"
resolved "https://registry.yarnpkg.com/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a"
@@ -6896,6 +7170,11 @@ strip-json-comments@3.1.1, strip-json-comments@^3.1.0, strip-json-comments@^3.1.
resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006"
integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==
+stubs@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/stubs/-/stubs-3.0.0.tgz#e8d2ba1fa9c90570303c030b6900f7d5f89abe5b"
+ integrity sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==
+
style-loader@^3.3.1:
version "3.3.1"
resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-3.3.1.tgz#057dfa6b3d4d7c7064462830f9113ed417d38575"
@@ -7020,6 +7299,17 @@ tarn@^3.0.1:
resolved "https://registry.yarnpkg.com/tarn/-/tarn-3.0.2.tgz#73b6140fbb881b71559c4f8bfde3d9a4b3d27693"
integrity sha512-51LAVKUSZSVfI05vjPESNc5vwqqZpbXCsU+/+wxlOrUjk2SnFTt97v9ZgQrD4YmxYW1Px6w2KjaDitCfkvgxMQ==
+teeny-request@^8.0.0:
+ version "8.0.2"
+ resolved "https://registry.yarnpkg.com/teeny-request/-/teeny-request-8.0.2.tgz#c06a75101cf782788ba8f9a2ed5f2ac84c1c4e15"
+ integrity sha512-34pe0a4zASseXZCKdeTiIZqSKA8ETHb1EwItZr01PAR3CLPojeAKgSjzeNS4373gi59hNulyDrPKEbh2zO9sCg==
+ dependencies:
+ http-proxy-agent "^5.0.0"
+ https-proxy-agent "^5.0.0"
+ node-fetch "^2.6.1"
+ stream-events "^1.0.5"
+ uuid "^9.0.0"
+
terminal-link@^2.0.0:
version "2.1.1"
resolved "https://registry.yarnpkg.com/terminal-link/-/terminal-link-2.1.1.tgz#14a64a27ab3c0df933ea546fba55f2d078edc994"
@@ -7394,11 +7684,16 @@ uuid@^3.3.2:
resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee"
integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==
-uuid@^8.3.2:
+uuid@^8.0.0, uuid@^8.3.2:
version "8.3.2"
resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2"
integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==
+uuid@^9.0.0:
+ version "9.0.0"
+ resolved "https://registry.yarnpkg.com/uuid/-/uuid-9.0.0.tgz#592f550650024a38ceb0c562f2f6aa435761efb5"
+ integrity sha512-MXcSTerfPa4uqyzStbRoTgt5XIe3x5+42+q1sDuy3R5MDk66URdLMOZe5aPX/SQd+kuYAh0FdP/pO28IkQyTeg==
+
v8-compile-cache-lib@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf"
From 2c58dc0a22ba3282e277f7dde9c9134f3d0f5aa0 Mon Sep 17 00:00:00 2001
From: Star
Date: Wed, 2 Nov 2022 18:21:27 +0800
Subject: [PATCH 2/3] docs(ext-driver-bq): update README and official document
for BigQuery dataSource
---
README.md | 108 +++++++++++-----------
packages/doc/docs/connectors.mdx | 2 +-
packages/doc/docs/connectors/bigquery.mdx | 70 ++++++++++++++
packages/extension-driver-bq/README.md | 53 ++++++++++-
4 files changed, 173 insertions(+), 60 deletions(-)
create mode 100644 packages/doc/docs/connectors/bigquery.mdx
diff --git a/README.md b/README.md
index ad04ee02..cf2a2ce6 100644
--- a/README.md
+++ b/README.md
@@ -18,19 +18,21 @@
## What is VulcanSQL
-> **VulcanSQL is an Analytics API generator** that helps data engineers to build scalable analytics APIs using only SQL without writing any backend code.
+
+> **VulcanSQL is an Analytics API generator** that helps data engineers to build scalable analytics APIs using only SQL without writing any backend code.
## Why VulcanSQL?
-APIs are still the primary programming interface for data consumers to utilize data in their daily business applications, such as BI, reports, dashboards, spreadsheets, and web applications. However, data stored in data warehouses are not accessible for those users and tools without an API consumption layer.
+APIs are still the primary programming interface for data consumers to utilize data in their daily business applications, such as BI, reports, dashboards, spreadsheets, and web applications. However, data stored in data warehouses are not accessible for those users and tools without an API consumption layer.
-VulcanSQL aims to solve that problem by translating SQL into flexible APIs; it is contextual in that it can translate APIs into the corresponding SQL based on different user personas and business contexts. It is also extendable with custom business logic and complex SQL translation.
+VulcanSQL aims to solve that problem by translating SQL into flexible APIs; it is contextual in that it can translate APIs into the corresponding SQL based on different user personas and business contexts. It is also extendable with custom business logic and complex SQL translation.
## When use VulcanSQL?
When scaling data usages outside the traditional data team to business users and application developers using APIs. VulcanSQL is the perfect solution for data using in applications.
## Features
+
- Parameterized SQL into scalable and secure APIs
- Built-in API access and version control
- Built-in self-generated API documentation
@@ -40,12 +42,13 @@ When scaling data usages outside the traditional data team to business users and
- PosgreSQL
- DuckDB
-- Snowflake (WIP)
-- BigQuery (WIP)
+- Snowflake
+- BigQuery
## How VulcanSQL works?
### Step 1: Parameterized your SQL.
+
@@ -75,64 +78,58 @@ Response
1. Error Handling
- If you want to throw errors based on business logic. for example, run a query first, if no data return, throw `404 not found`.
-
- ```sql
- {% req user %}
- select * from public.users where userName = {{ context.parames.userName }} limit 1;
- {% endreq %}
-
- {% if user.value().length == 0 %}
- {% error "user not found" %}
- {% endif %}
-
- select * from public.groups where userId = {{ user.value()[0].id }};
- ```
+If you want to throw errors based on business logic. for example, run a query first, if no data return, throw `404 not found`.
+
+```sql
+{% req user %}
+select * from public.users where userName = {{ context.parames.userName }} limit 1;
+{% endreq %}
+
+{% if user.value().length == 0 %}
+ {% error "user not found" %}
+{% endif %}
+
+select * from public.groups where userId = {{ user.value()[0].id }};
+```
+
2. Authorization
- You can pass in user attributes to achieve user access control. We will build the corresponding SQL on the fly.
-
- ```sql
- select
- --- masking address if query user is not admin
- {% if context.user.name == 'ADMIN' %}
- {% "address" %}
- {% elif %}
- {% "masking(address)" %}
- {% endif %},
-
- orderId,
- amount
- from orders
-
- --- limit the data to the store user belongs to.
- where store = {{ context.user.attr.store }}
- ```
+You can pass in user attributes to achieve user access control. We will build the corresponding SQL on the fly.
+
+```sql
+select
+ --- masking address if query user is not admin
+ {% if context.user.name == 'ADMIN' %}
+ {% "address" %}
+ {% elif %}
+ {% "masking(address)" %}
+ {% endif %},
+
+ orderId,
+ amount
+from orders
+
+--- limit the data to the store user belongs to.
+where store = {{ context.user.attr.store }}
+```
+
3. Validation
- You can add a number validator on `userId` input.
-
- - SQL
- ```sql
- select * from public.users where id = {{ context.params.userId }}
- ```
-
- - Schema
- ```yaml
- parameters:
- userId:
- in: query
- validators: # set your validator here.
- - name: 'number'
- ```
-
+You can add a number validator on `userId` input.
+- SQL
+ ```sql
+ select * from public.users where id = {{ context.params.userId }}
+ ```
+- Schema
+`yaml parameters: userId: in: query validators: # set your validator here. - name: 'number' `
+
### Step 2: Build self-serve documentation and catalog
@@ -176,6 +173,7 @@ On API catalog page, you can preview data or read from your applications.
Visit [the documentation](https://vulcansql.com/docs/installation) for installation guide.
## Demo Screenshot
+
@@ -201,10 +199,12 @@ Visit [the documentation](https://vulcansql.com/docs/installation) for installat
> 🔌 **Connect**: Users will be able to follow the guide and connect from their applications.
## Community
-* Welcome to our [Discord](https://discord.gg/ztDz8DCmG4) to give us feedback!
-* If any issues, please visit [Github Issues](https://github.com/Canner/vulcan-sql/issues)
+
+- Welcome to our [Discord](https://discord.gg/ztDz8DCmG4) to give us feedback!
+- If any issues, please visit [Github Issues](https://github.com/Canner/vulcan-sql/issues)
## Special Thanks
+
diff --git a/packages/doc/docs/connectors.mdx b/packages/doc/docs/connectors.mdx
index cf99cf28..766cc8a6 100644
--- a/packages/doc/docs/connectors.mdx
+++ b/packages/doc/docs/connectors.mdx
@@ -7,7 +7,7 @@ We support the following data warehouses to connect with, you can choose multipl
| [PostgreSQL](./connectors/postgresql) | ✅ Yes | ✅ Yes | ❌ No |
| [DuckDB](./connectors/duckdb) | ✅ Yes | ✅ Yes | ❌ No |
| [Snowflake](./connectors/snowflake) | ✅ Yes | ✅ Yes | ❌ No |
-| BigQuery | | | |
+| BigQuery | ✅ Yes | ✅ Yes | ❌ No |
\* Fetching rows only when we need them, it has better performance with large query results.
diff --git a/packages/doc/docs/connectors/bigquery.mdx b/packages/doc/docs/connectors/bigquery.mdx
new file mode 100644
index 00000000..205b5c07
--- /dev/null
+++ b/packages/doc/docs/connectors/bigquery.mdx
@@ -0,0 +1,70 @@
+# BigQuery
+
+Connect with your bigquery servers via the official [Node.js Driver](https://cloud.google.com/nodejs/docs/reference/bigquery/latest).
+
+## Installation
+
+1. Install package
+
+ ```bash
+ npm i @vulcan-sql/extension-driver-bq
+ ```
+
+ :::info
+ If you run VulcanSQL with Docker, you should use the command `vulcan-install @vulcan-sql/extension-driver-bq` instead.
+
+ :::
+
+2. Update `vulcan.yaml`, and enable the extension.
+
+ ```yaml
+ extensions:
+ ...
+ // highlight-next-line
+ bq: '@vulcan-sql/extension-driver-bq' # Add this line
+ ```
+
+3. Create a new profile in `profiles.yaml` or in your profile files. For example:
+ :::info
+ You can choose one from `keyFilename` or `credentials` to use.
+ For details, please refer to [here](https://cloud.google.com/docs/authentication#service-accounts)
+ :::
+
+wish keyFilename:
+
+```yaml
+- name: bq # profile name
+ type: bq
+ connection:
+ location: ''
+ projectId: 'your-project-id'
+ keyFilename: '/path/to/keyfile.json'
+ allow: '*'
+```
+
+wish credential:
+
+```yaml
+- name: bq # profile name
+ type: bq
+ connection:
+ location: US
+ projectId: 'your-project-id'
+ credential:
+ client_email: vulcan@projectId.iam.gserviceaccount.com
+ private_key: '-----BEGIN PRIVATE KEY----- XXXXX -----END PRIVATE KEY-----\n'
+ allow: '*'
+```
+
+## Connection Configuration
+
+Please check [Interface BigQueryOptions](https://cloud.google.com/nodejs/docs/reference/bigquery/latest/bigquery/bigqueryoptions) and [Google BigQuery: Node.js Client](https://github.com/googleapis/nodejs-bigquery/blob/main/src/bigquery.ts#L173-L244) for further information.
+
+| Name | Required | Default | Description |
+| ------------------------ | -------- | ------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| location | N | US | Location must match that of the dataset(s) referenced in the query. |
+| projectId | N | | The project ID from the Google Developer's Console, e.g. 'grape-spaceship-123'. We will also check the environment variable `GCLOUD_PROJECT` for your project ID. If your app is running in an environment which [supports](https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application Application Default Credentials), your project ID will be detected. |
+| keyFilename | N | | Full path to the a .json, .pem, or .p12 key downloaded from the Google Developers Console. If you provide a path to a JSON file, the `projectId` option above is not necessary. NOTE: .pem and .p12 require you to specify the `email` option as well. |
+| credentials | N | | Credentials object. |
+| credentials.client_email | N | | Your service account. |
+| credentials.private_key | N | | Your service account's private key. |
diff --git a/packages/extension-driver-bq/README.md b/packages/extension-driver-bq/README.md
index 5c69c847..c51f7cac 100644
--- a/packages/extension-driver-bq/README.md
+++ b/packages/extension-driver-bq/README.md
@@ -1,11 +1,54 @@
# extension-driver-bq
-This library was generated with [Nx](https://nx.dev).
+[nodejs-bigquery](https://cloud.google.com/nodejs/docs/reference/bigquery/latest) driver for Vulcan SQL.
+
+## Install
+
+1. Install package
+
+ ```sql
+ npm i @vulcan-sql/extension-driver-bq
+ ```
+
+2. Update `vulcan.yaml`, enable the extension.
+
+ ```yaml
+ extensions:
+ bq: '@vulcan-sql/extension-driver-bq'
+ ```
-## Building
+3. Create a new profile in `profiles.yaml` or in your profiles' paths.
-Run `nx build extension-driver-bq` to build the library.
+> ⚠️ Your service account must have the following permissions to successfully execute queries...
+>
+> - BigQuery Data Viewer
+> - BigQuery Job User
+
+```yaml
+- name: bq # profile name
+ type: bq
+ connection:
+ # Location must match that of the dataset(s) referenced in the query.
+ location: US
+ # Optional: The max rows we should fetch once.
+ chunkSize: 100
+ # The project ID from the Google Developer's Console, e.g. 'grape-spaceship-123'. We will also check the environment variable `GCLOUD_PROJECT` for your project ID. If your app is running in an environment which [supports](https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application Application Default Credentials), your project ID will be detected.
+ projectId: 'your-project-id'
+ # Full path to the a .json, .pem, or .p12 key downloaded from the Google Developers Console. If you provide a path to a JSON file, the `projectId` option above is not necessary. NOTE: .pem and .p12 require you to specify the `email` option as well.
+ keyFilename: '/path/to/keyfile.json'
+```
+
+## Testing
+
+```bash
+nx test extension-driver-bq
+```
+
+This library was generated with [Nx](https://nx.dev).
-## Running unit tests
+To run test, the following environment variables are required:
-Run `nx test extension-driver-bq` to execute the unit tests via [Jest](https://jestjs.io).
+- BQ_LOCATION
+- BQ_PROJECT_ID
+- BQ_CLIENT_EMAIL
+- BQ_PRIVATE_KEY
From 2d347bd22ac50641f7568d08f390a576ed7364c0 Mon Sep 17 00:00:00 2001
From: Star
Date: Thu, 3 Nov 2022 12:43:47 +0800
Subject: [PATCH 3/3] fix(ext-driver-bq): apply PR suggestions, including typo,
 document layout and testing fixes
---
packages/doc/docs/connectors.mdx | 2 +-
packages/doc/docs/connectors/bigquery.mdx | 62 ++++++-----
packages/extension-driver-bq/README.md | 4 +-
packages/extension-driver-bq/package.json | 4 +-
packages/extension-driver-bq/project.json | 25 ++++-
.../src/lib/bqDataSource.ts | 20 ++--
.../test/bqDataSource.spec.ts | 102 ++----------------
packages/extension-driver-bq/test/bqServer.ts | 2 +-
8 files changed, 77 insertions(+), 144 deletions(-)
diff --git a/packages/doc/docs/connectors.mdx b/packages/doc/docs/connectors.mdx
index 766cc8a6..6ccb2e9b 100644
--- a/packages/doc/docs/connectors.mdx
+++ b/packages/doc/docs/connectors.mdx
@@ -7,7 +7,7 @@ We support the following data warehouses to connect with, you can choose multipl
| [PostgreSQL](./connectors/postgresql) | ✅ Yes | ✅ Yes | ❌ No |
| [DuckDB](./connectors/duckdb) | ✅ Yes | ✅ Yes | ❌ No |
| [Snowflake](./connectors/snowflake) | ✅ Yes | ✅ Yes | ❌ No |
-| BigQuery | ✅ Yes | ✅ Yes | ❌ No |
+| [BigQuery](./connectors/bigquery) | ✅ Yes | ✅ Yes | ❌ No |
\* Fetching rows only when we need them, it has better performance with large query results.
diff --git a/packages/doc/docs/connectors/bigquery.mdx b/packages/doc/docs/connectors/bigquery.mdx
index 205b5c07..dcb9af38 100644
--- a/packages/doc/docs/connectors/bigquery.mdx
+++ b/packages/doc/docs/connectors/bigquery.mdx
@@ -27,34 +27,42 @@ Connect with your bigquery servers via the official [Node.js Driver](https://clo
3. Create a new profile in `profiles.yaml` or in your profile files. For example:
:::info
You can choose one from `keyFilename` or `credentials` to use.
- For details, please refer to [here](https://cloud.google.com/docs/authentication#service-accounts)
+
+ Your service account must have the following permissions to successfully execute queries.
+
+ - BigQuery Data Viewer
+ - BigQuery Job User
+
+ >
+
+ For details, please refer to [here](https://cloud.google.com/docs/authentication#service-accounts).
:::
-wish keyFilename:
-
-```yaml
-- name: bq # profile name
- type: bq
- connection:
- location: ''
- projectId: 'your-project-id'
- keyFilename: '/path/to/keyfile.json'
- allow: '*'
-```
-
-wish credential:
-
-```yaml
-- name: bq # profile name
- type: bq
- connection:
- location: US
- projectId: 'your-project-id'
- credential:
- client_email: vulcan@projectId.iam.gserviceaccount.com
- private_key: '-----BEGIN PRIVATE KEY----- XXXXX -----END PRIVATE KEY-----\n'
- allow: '*'
-```
+ with `keyFilename`:
+
+ ```yaml
+ - name: bq # profile name
+ type: bq
+ connection:
+ location: US
+ projectId: 'your-project-id'
+ keyFilename: '/path/to/keyfile.json'
+ allow: '*'
+ ```
+
+ with `credential`:
+
+ ```yaml
+ - name: bq # profile name
+ type: bq
+ connection:
+ location: US
+ projectId: 'your-project-id'
+ credential:
+ client_email: vulcan@projectId.iam.gserviceaccount.com
+ private_key: '-----BEGIN PRIVATE KEY----- XXXXX -----END PRIVATE KEY-----\n'
+ allow: '*'
+ ```
## Connection Configuration
@@ -63,7 +71,7 @@ Please check [Interface BigQueryOptions](https://cloud.google.com/nodejs/docs/re
| Name | Required | Default | Description |
| ------------------------ | -------- | ------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| location | N | US | Location must match that of the dataset(s) referenced in the query. |
-| projectId | N | | The project ID from the Google Developer's Console, e.g. 'grape-spaceship-123'. We will also check the environment variable `GCLOUD_PROJECT` for your project ID. If your app is running in an environment which [supports](https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application Application Default Credentials), your project ID will be detected. |
+| projectId | N | | The project ID from the Google Developer's Console, e.g. 'grape-spaceship-123'. We will also check the environment variable `GCLOUD_PROJECT` for your project ID. If your app is running in an environment which [supports](https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application) Application Default Credentials, your project ID will be detected. |
| keyFilename | N | | Full path to the a .json, .pem, or .p12 key downloaded from the Google Developers Console. If you provide a path to a JSON file, the `projectId` option above is not necessary. NOTE: .pem and .p12 require you to specify the `email` option as well. |
| credentials | N | | Credentials object. |
| credentials.client_email | N | | Your service account. |
diff --git a/packages/extension-driver-bq/README.md b/packages/extension-driver-bq/README.md
index c51f7cac..7c680db4 100644
--- a/packages/extension-driver-bq/README.md
+++ b/packages/extension-driver-bq/README.md
@@ -19,7 +19,7 @@
3. Create a new profile in `profiles.yaml` or in your profiles' paths.
-> ⚠️ Your service account must have the following permissions to successfully execute queries...
+> ⚠️ Your service account must have the following permissions to successfully execute queries.
>
> - BigQuery Data Viewer
> - BigQuery Job User
@@ -32,7 +32,7 @@
location: US
# Optional: The max rows we should fetch once.
chunkSize: 100
- # The project ID from the Google Developer's Console, e.g. 'grape-spaceship-123'. We will also check the environment variable `GCLOUD_PROJECT` for your project ID. If your app is running in an environment which [supports](https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application Application Default Credentials), your project ID will be detected.
+ # The project ID from the Google Developer's Console, e.g. 'grape-spaceship-123'. We will also check the environment variable `GCLOUD_PROJECT` for your project ID. If your app is running in an environment which [supports](https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application) Application Default Credentials, your project ID will be detected.
projectId: 'your-project-id'
# Full path to the a .json, .pem, or .p12 key downloaded from the Google Developers Console. If you provide a path to a JSON file, the `projectId` option above is not necessary. NOTE: .pem and .p12 require you to specify the `email` option as well.
keyFilename: '/path/to/keyfile.json'
diff --git a/packages/extension-driver-bq/package.json b/packages/extension-driver-bq/package.json
index f1827a64..e07ce072 100644
--- a/packages/extension-driver-bq/package.json
+++ b/packages/extension-driver-bq/package.json
@@ -15,8 +15,8 @@
"data-warehouse",
"data-lake",
"api-builder",
- "postgres",
- "pg"
+ "bigquery",
+ "bq"
],
"repository": {
"type": "git",
diff --git a/packages/extension-driver-bq/project.json b/packages/extension-driver-bq/project.json
index 7c974c25..afac79f1 100644
--- a/packages/extension-driver-bq/project.json
+++ b/packages/extension-driver-bq/project.json
@@ -3,19 +3,38 @@
"sourceRoot": "packages/extension-driver-bq/src",
"targets": {
"build": {
+ "executor": "@nrwl/workspace:run-commands",
+ "options": {
+ "command": "yarn ts-node ./tools/scripts/replaceAlias.ts extension-driver-bq"
+ },
+ "dependsOn": [
+ {
+ "projects": "self",
+ "target": "tsc"
+ }
+ ]
+ },
+ "tsc": {
"executor": "@nrwl/js:tsc",
"outputs": ["{options.outputPath}"],
"options": {
"outputPath": "dist/packages/extension-driver-bq",
"main": "packages/extension-driver-bq/src/index.ts",
"tsConfig": "packages/extension-driver-bq/tsconfig.lib.json",
- "assets": ["packages/extension-driver-bq/*.md"]
- }
+ "assets": ["packages/extension-driver-bq/*.md"],
+ "buildableProjectDepsInPackageJsonType": "dependencies"
+ },
+ "dependsOn": [
+ {
+ "projects": "dependencies",
+ "target": "build"
+ }
+ ]
},
"publish": {
"executor": "@nrwl/workspace:run-commands",
"options": {
- "command": "node tools/scripts/publish.mjs extension-driver-bq {args.ver} {args.tag}",
+ "command": "node ../../../tools/scripts/publish.mjs {args.tag} {args.version}",
"cwd": "dist/packages/extension-driver-bq"
},
"dependsOn": [
diff --git a/packages/extension-driver-bq/src/lib/bqDataSource.ts b/packages/extension-driver-bq/src/lib/bqDataSource.ts
index 60ac4a41..ca0e43c7 100644
--- a/packages/extension-driver-bq/src/lib/bqDataSource.ts
+++ b/packages/extension-driver-bq/src/lib/bqDataSource.ts
@@ -26,13 +26,14 @@ export class BQDataSource extends DataSource {
const profiles = this.getProfiles().values();
for (const profile of profiles) {
this.logger.debug(
- `Initializing profile: ${profile.name} using pg driver`
+ `Initializing profile: ${profile.name} using bq driver`
);
const bigqueryClient = new BigQuery(profile.connection);
// https://cloud.google.com/nodejs/docs/reference/bigquery/latest
this.bqMapping.set(profile.name, {
bq: bigqueryClient,
+ options: profile.connection,
});
// Testing connection
@@ -51,9 +52,7 @@ export class BQDataSource extends DataSource {
throw new InternalError(`Profile instance ${profileName} not found`);
}
const { bq: client, options } = this.bqMapping.get(profileName)!;
- this.logger.debug(`Acquiring connection from ${profileName}`);
- origin;
    const params: Record<string, any> = {};
bindParams.forEach((value, key) => {
params[key.replace('@', '')] = value;
@@ -70,7 +69,6 @@ export class BQDataSource extends DataSource {
const [job] = await client.createQueryJob(queryOptions);
- // All promises MUST fulfilled in this function or we are not able to release the connection when error occurred
return await this.getResultFromQueryJob(job, options);
} catch (e: any) {
this.logger.debug(
@@ -89,8 +87,8 @@ export class BQDataSource extends DataSource {
options?: BQOptions
  ): Promise<DataResult> {
const { chunkSize = 100 } = options || {};
- const jobDataRead = this.jobDataRead.bind(this);
- const firstChunk = await jobDataRead(queryJob, chunkSize);
+ const fetchJobResult = this.fetchJobResult.bind(this);
+ const firstChunk = await fetchJobResult(queryJob, chunkSize);
// save first chunk in buffer for incoming requests
let bufferedRows = [...firstChunk.rows];
@@ -101,7 +99,7 @@ export class BQDataSource extends DataSource {
if (bufferReadIndex >= bufferedRows.length) {
if (nextQuery == null) return null;
- const fetchData = await jobDataRead(queryJob, chunkSize, nextQuery);
+ const fetchData = await fetchJobResult(queryJob, chunkSize, nextQuery);
bufferedRows = fetchData.rows;
nextQuery = fetchData.nextQuery;
bufferReadIndex = 0;
@@ -122,10 +120,6 @@ export class BQDataSource extends DataSource {
this.destroy(error);
});
},
- destroy(error: Error | null, cb: (error: Error | null) => void) {
- // Send done event to notify upstream to release the connection.
- cb(error);
- },
// automatically destroy() the stream when it emits 'finish' or errors. Node > 10.16
autoDestroy: true,
});
@@ -141,7 +135,7 @@ export class BQDataSource extends DataSource {
};
}
- public async jobDataRead(
+ public async fetchJobResult(
queryJob: Job,
chunkSize: number,
nextQuery?: Query | null | undefined
@@ -151,7 +145,7 @@ export class BQDataSource extends DataSource {
nextQuery: Query | null | undefined;
apiResponse: bigquery.IGetQueryResultsResponse | null | undefined;
}>((resolve, reject) => {
- return queryJob.getQueryResults(
+ queryJob.getQueryResults(
nextQuery || { maxResults: chunkSize },
(err, rows, nextQuery, apiResponse) => {
if (err) {
diff --git a/packages/extension-driver-bq/test/bqDataSource.spec.ts b/packages/extension-driver-bq/test/bqDataSource.spec.ts
index d3942bac..dca6bd5e 100644
--- a/packages/extension-driver-bq/test/bqDataSource.spec.ts
+++ b/packages/extension-driver-bq/test/bqDataSource.spec.ts
@@ -1,13 +1,10 @@
import { BQDataSource } from '../src';
import { BQflakeServer } from './bqServer';
import { streamToArray } from '@vulcan-sql/core';
-import { Writable } from 'stream';
const bigQuery = new BQflakeServer();
let dataSource: BQDataSource;
-const bqTable = `\`cannerflow-286003.bq_testing_tpch.orders\``;
-
it('Data source should be activate without any error when all profiles are valid', async () => {
// Arrange
dataSource = new BQDataSource({}, '', [bigQuery.getProfile('profile1')]);
@@ -16,10 +13,9 @@ it('Data source should be activate without any error when all profiles are valid
await expect(dataSource.activate()).resolves.not.toThrow();
});
-it('Data source should throw error when activating if any profile is invalid', async () => {
+it('Data source should throw error when activating any profile which is invalid', async () => {
// Arrange
const invalidProfile = bigQuery.getProfile('profile1');
- // invalidProfile.connection.projectId = 'invalid';
invalidProfile.connection.credentials = {};
dataSource = new BQDataSource({}, '', [
bigQuery.getProfile('profile1'),
@@ -36,7 +32,7 @@ it('Data source should return correct rows with 2 chunks', async () => {
await dataSource.activate();
// Act
const { getData } = await dataSource.execute({
- statement: `select * from ${bqTable} limit 193`,
+ statement: `SELECT num FROM UNNEST(GENERATE_ARRAY(1, 193)) AS num`,
bindParams: new Map(),
profileName: 'profile1',
operations: {} as any,
@@ -52,7 +48,7 @@ it('Data source should return correct rows with 1 chunk', async () => {
await dataSource.activate();
// Act
const { getData } = await dataSource.execute({
- statement: `select * from ${bqTable} limit 12`,
+ statement: `SELECT num FROM UNNEST(GENERATE_ARRAY(1, 20)) AS num LIMIT 12`,
bindParams: new Map(),
profileName: 'profile1',
operations: {} as any,
@@ -68,7 +64,7 @@ it('Data source should return empty data with no row', async () => {
await dataSource.activate();
// Act
const { getData } = await dataSource.execute({
- statement: `select * from ${bqTable} limit 0`,
+ statement: `SELECT num FROM UNNEST(GENERATE_ARRAY(1, 10)) AS num LIMIT 0`,
bindParams: new Map(),
profileName: 'profile1',
operations: {} as any,
@@ -78,57 +74,6 @@ it('Data source should return empty data with no row', async () => {
expect(rows.length).toBe(0);
}, 30000);
-it('Data source should release the connection when finished no matter success or not', async () => {
- // Arrange
- dataSource = new BQDataSource({}, '', [bigQuery.getProfile('profile1')]);
- await dataSource.activate();
-
- // Act
- // send parallel queries to test pool leak
- const result = await Promise.all(
- [
- async () => {
- const { getData } = await dataSource.execute({
- statement: `select * from ${bqTable} limit 1`,
- bindParams: new Map(),
- profileName: 'profile1',
- operations: {} as any,
- });
- return await streamToArray(getData());
- },
- async () => {
- try {
- const { getData } = await dataSource.execute({
- statement: 'wrong sql',
- bindParams: new Map(),
- profileName: 'profile1',
- operations: {} as any,
- });
- await streamToArray(getData());
- return [{}]; // fake data
- } catch {
- // ignore error
- return [];
- }
- },
- async () => {
- const { getData } = await dataSource.execute({
- statement: `select * from ${bqTable} limit 1`,
- bindParams: new Map(),
- profileName: 'profile1',
- operations: {} as any,
- });
- return await streamToArray(getData());
- },
- ].map((task) => task())
- );
-
- // Assert
- expect(result[0].length).toBe(1);
- expect(result[1].length).toBe(0);
- expect(result[2].length).toBe(1);
-}, 30000);
-
it('Data source should work with prepare statements', async () => {
// Arrange
dataSource = new BQDataSource({}, '', [bigQuery.getProfile('profile1')]);
@@ -167,7 +112,7 @@ it('Data source should return correct column types', async () => {
await dataSource.activate();
// Act
const { getColumns, getData } = await dataSource.execute({
- statement: `select * from ${bqTable} limit 0`,
+ statement: `SELECT CAST(1 as bigint) as a, true as b`,
bindParams: new Map(),
profileName: 'profile1',
operations: {} as any,
@@ -178,39 +123,6 @@ it('Data source should return correct column types', async () => {
data.destroy();
// Assert
- expect(column[0]).toEqual({ name: 'orderkey', type: 'number' });
- expect(column[2]).toEqual({ name: 'orderstatus', type: 'string' });
-}, 30000);
-
-it('Data source should release connection when readable stream is destroyed', async () => {
- // Arrange
- dataSource = new BQDataSource({}, '', [bigQuery.getProfile('profile1')]);
- await dataSource.activate();
- // Act
- const { getData } = await dataSource.execute({
- statement: `select * from ${bqTable} limit 100`,
- bindParams: new Map(),
- profileName: 'profile1',
- operations: {} as any,
- });
- const readStream = getData();
- const rows: any[] = [];
- let resolve: any;
- const waitForStream = () => new Promise((res) => (resolve = res));
- const writeStream = new Writable({
- write(chunk, _, cb) {
- rows.push(chunk);
- // After read 5 records, destroy the upstream
- if (rows.length === 5) {
- readStream.destroy();
- resolve();
- } else cb();
- },
- objectMode: true,
- });
- readStream.pipe(writeStream);
- await waitForStream();
- // Assert
- expect(rows.length).toBe(5);
- // afterEach hook will timeout if any leak occurred.
+ expect(column[0]).toEqual({ name: 'a', type: 'number' });
+ expect(column[1]).toEqual({ name: 'b', type: 'boolean' });
}, 30000);
diff --git a/packages/extension-driver-bq/test/bqServer.ts b/packages/extension-driver-bq/test/bqServer.ts
index dfd178f6..b6efaafd 100644
--- a/packages/extension-driver-bq/test/bqServer.ts
+++ b/packages/extension-driver-bq/test/bqServer.ts
@@ -11,8 +11,8 @@ export class BQflakeServer {
return {
name,
type: 'bq',
- location: process.env['BQ_LOCATION'],
connection: {
+ location: process.env['BQ_LOCATION'],
projectId: process.env['BQ_PROJECT_ID'],
credentials: {
client_email: process.env['BQ_CLIENT_EMAIL'],