From abaec8faa84e23f947f178f7b3a5b0208aa65859 Mon Sep 17 00:00:00 2001
From: nithinkdb
Date: Thu, 17 Aug 2023 16:38:30 -0700
Subject: [PATCH 01/10] Initial commit

---
 lib/DBSQLClient.ts                           |  2 +-
 lib/DBSQLSession.ts                          |  2 +
 lib/connection/connections/HttpConnection.ts |  2 +-
 lib/contracts/IDBSQLSession.ts               |  1 +
 lib/utils/ParameterConverter.ts              | 38 ++++++++++++++
 tests/e2e/parameterized_query.test.js        | 53 ++++++++++++++++++++
 6 files changed, 96 insertions(+), 2 deletions(-)
 create mode 100644 lib/utils/ParameterConverter.ts
 create mode 100644 tests/e2e/parameterized_query.test.js

diff --git a/lib/DBSQLClient.ts b/lib/DBSQLClient.ts
index 41418791..a4823cc0 100644
--- a/lib/DBSQLClient.ts
+++ b/lib/DBSQLClient.ts
@@ -1,6 +1,6 @@
+import { EventEmitter } from 'events';
 import thrift, { HttpHeaders } from 'thrift';
-import { EventEmitter } from 'events';
 
 import TCLIService from '../thrift/TCLIService';
 import { TProtocolVersion } from '../thrift/TCLIService_types';
 import IDBSQLClient, { ClientOptions, ConnectionOptions, OpenSessionRequest } from './contracts/IDBSQLClient';
diff --git a/lib/DBSQLSession.ts b/lib/DBSQLSession.ts
index f952e562..263bd242 100644
--- a/lib/DBSQLSession.ts
+++ b/lib/DBSQLSession.ts
@@ -29,6 +29,7 @@ import CloseableCollection from './utils/CloseableCollection';
 import IDBSQLLogger, { LogLevel } from './contracts/IDBSQLLogger';
 import HiveDriverError from './errors/HiveDriverError';
 import globalConfig from './globalConfig';
+import convertToSparkParameters from './utils/ParameterConverter';
 
 const defaultMaxRows = 100000;
 
@@ -140,6 +141,7 @@
       ...getDirectResultsOptions(options.maxRows),
       ...getArrowOptions(),
       canDownloadResult: options.useCloudFetch ?? globalConfig.useCloudFetch,
+      parameters: options.parameters ? convertToSparkParameters(options.parameters) : undefined,
     });
     const response = await this.handleResponse(operationPromise);
     return this.createOperation(response);
diff --git a/lib/connection/connections/HttpConnection.ts b/lib/connection/connections/HttpConnection.ts
index ec665ed5..0cc19155 100644
--- a/lib/connection/connections/HttpConnection.ts
+++ b/lib/connection/connections/HttpConnection.ts
@@ -1,6 +1,6 @@
-import thrift from 'thrift';
 import https from 'https';
 import http, { IncomingMessage } from 'http';
+import thrift from 'thrift';
 
 import IThriftConnection from '../contracts/IThriftConnection';
 import IConnectionProvider from '../contracts/IConnectionProvider';
diff --git a/lib/contracts/IDBSQLSession.ts b/lib/contracts/IDBSQLSession.ts
index b80a6666..bedffe6d 100644
--- a/lib/contracts/IDBSQLSession.ts
+++ b/lib/contracts/IDBSQLSession.ts
@@ -8,6 +8,7 @@ export type ExecuteStatementOptions = {
   runAsync?: boolean;
   maxRows?: number | null;
   useCloudFetch?: boolean;
+  parameters?: object;
 };
 
 export type TypeInfoRequest = {
diff --git a/lib/utils/ParameterConverter.ts b/lib/utils/ParameterConverter.ts
new file mode 100644
index 00000000..a52744c3
--- /dev/null
+++ b/lib/utils/ParameterConverter.ts
@@ -0,0 +1,38 @@
+import { TSparkParameter, TSparkParameterValue } from '../../thrift/TCLIService_types';
+import HiveDriverError from '../errors/HiveDriverError';
+
+function getTypeAndValue(value: any): [string, TSparkParameterValue] {
+  switch (typeof value) {
+    case 'object':
+      if (value === null) {
+        return ['VOID', new TSparkParameterValue()];
+      }
+
+      throw new HiveDriverError('Unsupported object type used for parameterized query.');
+
+    case 'boolean':
+      return ['BOOLEAN', new TSparkParameterValue({ booleanValue: value })];
+    case 'number':
+      if (Number.isInteger(value)) {
+        return ['INT', new TSparkParameterValue({ doubleValue: value })];
+      }
+
+      return ['DOUBLE', new TSparkParameterValue({ doubleValue: value })];
+
+    case 'string':
+      return ['STRING', new TSparkParameterValue({ stringValue: value })];
+    default:
+      throw new HiveDriverError('Unsupported object type used for parameterized query.');
+  }
+}
+
+export default function convertToSparkParameters(params: object): TSparkParameter[] {
+  const sparkValueParams = [];
+  for (const e of Object.entries(params)) {
+    const key = e[0];
+    const value = e[1];
+    const typeValueTuple = getTypeAndValue(value);
+    sparkValueParams.push(new TSparkParameter({ name: key, type: typeValueTuple[0], value: typeValueTuple[1] }));
+  }
+  return sparkValueParams;
+}
diff --git a/tests/e2e/parameterized_query.test.js b/tests/e2e/parameterized_query.test.js
new file mode 100644
index 00000000..d363b700
--- /dev/null
+++ b/tests/e2e/parameterized_query.test.js
@@ -0,0 +1,53 @@
+const { expect } = require('chai');
+const config = require('./utils/config');
+const logger = require('./utils/logger')(config.logger);
+
+const DBSQLClient = require('../../dist/DBSQLClient').default;
+const convertToSparkParameters = require('../../dist/utils/ParameterConverter').default;
+const { TSparkParameterValue, TSparkParameter } = require('../../thrift/TCLIService_types');
+const globalConfig = require('../../dist/globalConfig').default;
+
+describe('Parameterized query converter unit test', () => {
+  expect(convertToSparkParameters({ key: null })[0]).to.deep.eq(
+    new TSparkParameter({ name: 'key', type: 'VOID', value: new TSparkParameterValue() }),
+  );
+  expect(convertToSparkParameters({ key: 'value' })[0]).to.deep.eq(
+    new TSparkParameter({ name: 'key', type: 'STRING', value: new TSparkParameterValue({ stringValue: 'value' }) }),
+  );
+  expect(convertToSparkParameters({ key: 1 })[0]).to.deep.eq(
+    new TSparkParameter({ name: 'key', type: 'INT', value: new TSparkParameterValue({ doubleValue: 1 }) }),
+  );
+  expect(convertToSparkParameters({ key: 1.1 })[0]).to.deep.eq(
+    new TSparkParameter({ name: 'key', type: 'DOUBLE', value: new TSparkParameterValue({ doubleValue: 1.1 }) }),
+  );
+  expect(convertToSparkParameters({ key: true })[0]).to.deep.eq(
+    new TSparkParameter({ name: 'key', type: 'BOOLEAN', value: new TSparkParameterValue({ booleanValue: true }) }),
+  );
+});
+
+const openSession = async () => {
+  const client = new DBSQLClient();
+
+  const connection = await client.connect({
+    host: config.host,
+    path: config.path,
+    token: config.token,
+  });
+
+  return connection.openSession({
+    initialCatalog: config.database[0],
+    initialSchema: config.database[1],
+  });
+};
+
+describe('Parameterized Query', async () => {
+  it('should use default socket timeout', async () => {
+    const query = `
+    select * from default.stock_data where open > {{parameter}}
+    `;
+
+    let session = await openSession();
+
+    let result = await session.executeStatement(query, { parameters: 2 });
+  });
+});

From dd093e0dfab541eb6905ba424ea48f7f9b61946c Mon Sep 17 00:00:00 2001
From: nithinkdb
Date: Mon, 21 Aug 2023 16:13:35 -0700
Subject: [PATCH 02/10] Fixed comments

---
 lib/DBSQLSession.ts                                        | 4 ++--
 lib/contracts/IDBSQLSession.ts                             | 3 ++-
 .../{ParameterConverter.ts => convertToSparkParameters.ts} | 0
 3 files changed, 4 insertions(+), 3 deletions(-)
 rename lib/utils/{ParameterConverter.ts => convertToSparkParameters.ts} (100%)

diff --git a/lib/DBSQLSession.ts b/lib/DBSQLSession.ts
index 263bd242..5dad486d 100644
--- a/lib/DBSQLSession.ts
+++ b/lib/DBSQLSession.ts
@@ -29,7 +29,7 @@ import CloseableCollection from './utils/CloseableCollection';
 import IDBSQLLogger, { LogLevel } from './contracts/IDBSQLLogger';
 import HiveDriverError from './errors/HiveDriverError';
 import globalConfig from './globalConfig';
-import convertToSparkParameters from './utils/ParameterConverter';
+import convertToSparkParameters from './utils/convertToSparkParameters';
 
 const defaultMaxRows = 100000;
 
@@ -141,7 +141,7 @@
       ...getDirectResultsOptions(options.maxRows),
       ...getArrowOptions(),
       canDownloadResult: options.useCloudFetch ?? globalConfig.useCloudFetch,
-      parameters: options.parameters ? convertToSparkParameters(options.parameters) : undefined,
+      parameters: options.namedParameters ? convertToSparkParameters(options.namedParameters) : undefined,
     });
     const response = await this.handleResponse(operationPromise);
     return this.createOperation(response);
diff --git a/lib/contracts/IDBSQLSession.ts b/lib/contracts/IDBSQLSession.ts
index bedffe6d..ae0679db 100644
--- a/lib/contracts/IDBSQLSession.ts
+++ b/lib/contracts/IDBSQLSession.ts
@@ -8,7 +8,8 @@ export type ExecuteStatementOptions = {
   runAsync?: boolean;
   maxRows?: number | null;
   useCloudFetch?: boolean;
-  parameters?: object;
+  namedParameters?: Record;
+  positionalParameters?: Record;
 };
 
 export type TypeInfoRequest = {
diff --git a/lib/utils/ParameterConverter.ts b/lib/utils/convertToSparkParameters.ts
similarity index 100%
rename from lib/utils/ParameterConverter.ts
rename to lib/utils/convertToSparkParameters.ts

From ffb817cc6a68c3b9578e07f64cc5f072430522fc Mon Sep 17 00:00:00 2001
From: nithinkdb
Date: Mon, 21 Aug 2023 16:14:41 -0700
Subject: [PATCH 03/10] Reverted change

---
 lib/DBSQLClient.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lib/DBSQLClient.ts b/lib/DBSQLClient.ts
index a4823cc0..41418791 100644
--- a/lib/DBSQLClient.ts
+++ b/lib/DBSQLClient.ts
@@ -1,6 +1,6 @@
-import { EventEmitter } from 'events';
 import thrift, { HttpHeaders } from 'thrift';
+import { EventEmitter } from 'events';
 
 import TCLIService from '../thrift/TCLIService';
 import { TProtocolVersion } from '../thrift/TCLIService_types';
 import IDBSQLClient, { ClientOptions, ConnectionOptions, OpenSessionRequest } from './contracts/IDBSQLClient';

From 638268671913e6410d8fb940241a775850709fee Mon Sep 17 00:00:00 2001
From: nithinkdb
Date: Thu, 31 Aug 2023 17:42:08 -0700
Subject: [PATCH 04/10] Updated to new standard

---
 lib/DBSQLClient.ts                     |   2 +-
 lib/DBSQLParameter.ts                  |  19 +++++
 lib/DBSQLSession.ts                    |   2 +-
 lib/contracts/IDBSQLSession.ts         |   4 +-
 lib/utils/convertToSparkParameters.ts  | 107 ++++++++++++++++++-------
 tests/e2e/parameterized_query.test.js  |  53 ------------
 tests/unit/parameterized_query.test.js |  37 +++++++++
 7 files changed, 137 insertions(+), 87 deletions(-)
 create mode 100644 lib/DBSQLParameter.ts
 delete mode 100644 tests/e2e/parameterized_query.test.js
 create mode 100644 tests/unit/parameterized_query.test.js

diff --git a/lib/DBSQLClient.ts b/lib/DBSQLClient.ts
index 41418791..a4823cc0 100644
--- a/lib/DBSQLClient.ts
+++ b/lib/DBSQLClient.ts
@@ -1,6 +1,6 @@
+import { EventEmitter } from 'events';
 import thrift, { HttpHeaders } from 'thrift';
-import { EventEmitter } from 'events';
 
 import TCLIService from '../thrift/TCLIService';
 import { TProtocolVersion } from '../thrift/TCLIService_types';
 import IDBSQLClient, { ClientOptions, ConnectionOptions, OpenSessionRequest } from './contracts/IDBSQLClient';
diff --git a/lib/DBSQLParameter.ts b/lib/DBSQLParameter.ts
new file mode 100644
index 00000000..2ff636cc
--- /dev/null
+++ b/lib/DBSQLParameter.ts
@@ -0,0 +1,19 @@
+interface ParameterInput {
+  name?: string;
+  type?: string;
+  value?: any;
+}
+
+export default class DBSQLParameter {
+  name?: string;
+
+  type?: string;
+
+  value?: any;
+
+  public constructor({ name, type, value }: ParameterInput) {
+    this.name = name;
+    this.type = type;
+    this.value = value;
+  }
+}
diff --git a/lib/DBSQLSession.ts b/lib/DBSQLSession.ts
index 5dad486d..ca097d75 100644
--- a/lib/DBSQLSession.ts
+++ b/lib/DBSQLSession.ts
@@ -141,7 +141,7 @@
       ...getDirectResultsOptions(options.maxRows),
       ...getArrowOptions(),
       canDownloadResult: options.useCloudFetch ?? globalConfig.useCloudFetch,
-      parameters: options.namedParameters ? convertToSparkParameters(options.namedParameters) : undefined,
+      parameters: options.parameters ? convertToSparkParameters(options.parameters) : undefined,
     });
     const response = await this.handleResponse(operationPromise);
     return this.createOperation(response);
diff --git a/lib/contracts/IDBSQLSession.ts b/lib/contracts/IDBSQLSession.ts
index ae0679db..c705f5d8 100644
--- a/lib/contracts/IDBSQLSession.ts
+++ b/lib/contracts/IDBSQLSession.ts
@@ -2,14 +2,14 @@ import IOperation from './IOperation';
 import Status from '../dto/Status';
 import InfoValue from '../dto/InfoValue';
 import { Int64 } from '../hive/Types';
+import DBSQLParameter from '../DBSQLParameter';
 
 export type ExecuteStatementOptions = {
   queryTimeout?: Int64;
   runAsync?: boolean;
   maxRows?: number | null;
   useCloudFetch?: boolean;
-  namedParameters?: Record;
-  positionalParameters?: Record;
+  parameters?: DBSQLParameter[];
 };
 
 export type TypeInfoRequest = {
diff --git a/lib/utils/convertToSparkParameters.ts b/lib/utils/convertToSparkParameters.ts
index a52744c3..3a5e2f74 100644
--- a/lib/utils/convertToSparkParameters.ts
+++ b/lib/utils/convertToSparkParameters.ts
@@ -1,38 +1,85 @@
 import { TSparkParameter, TSparkParameterValue } from '../../thrift/TCLIService_types';
+import DBSQLParameter from '../DBSQLParameter';
 import HiveDriverError from '../errors/HiveDriverError';
 
-function getTypeAndValue(value: any): [string, TSparkParameterValue] {
-  switch (typeof value) {
-    case 'object':
-      if (value === null) {
-        return ['VOID', new TSparkParameterValue()];
-      }
-
-      throw new HiveDriverError('Unsupported object type used for parameterized query.');
-
-    case 'boolean':
-      return ['BOOLEAN', new TSparkParameterValue({ booleanValue: value })];
-    case 'number':
-      if (Number.isInteger(value)) {
-        return ['INT', new TSparkParameterValue({ doubleValue: value })];
-      }
-
-      return ['DOUBLE', new TSparkParameterValue({ doubleValue: value })];
-
-    case 'string':
-      return ['STRING', new TSparkParameterValue({ stringValue: value })];
-    default:
-      throw new HiveDriverError('Unsupported object type used for parameterized query.');
-  }
-}
-
-export default function convertToSparkParameters(params: object): TSparkParameter[] {
-  const sparkValueParams = [];
-  for (const e of Object.entries(params)) {
-    const key = e[0];
-    const value = e[1];
-    const typeValueTuple = getTypeAndValue(value);
-    sparkValueParams.push(new TSparkParameter({ name: key, type: typeValueTuple[0], value: typeValueTuple[1] }));
-  }
-  return sparkValueParams;
-}
+function convertToDBSQLParameters(values: any[]): DBSQLParameter[] {
+  const params: DBSQLParameter[] = [];
+  for (const value of values) {
+    switch (typeof value) {
+      case 'object':
+        if (value === null) {
+          params.push(new DBSQLParameter({}));
+          break;
+        }
+        if (value instanceof DBSQLParameter) {
+          params.push(value);
+          break;
+        }
+        throw new HiveDriverError('Unsupported object type used for parameterized query.');
+      default:
+        params.push(new DBSQLParameter({ value }));
+    }
+  }
+  return params;
+}
+// Possible inputs to the params array:
+// Naked args (will be converted to DBParameter)
+function inferTypes(params: DBSQLParameter[]): void {
+  for (const param of params) {
+    if (!param.type) {
+      switch (typeof param.value) {
+        case 'undefined':
+          param.type = 'VOID';
+          break;
+        case 'boolean':
+          param.type = 'BOOLEAN';
+          param.value = param.value.toString();
+          break;
+        case 'number':
+          if (Number.isInteger(param.value)) {
+            param.type = 'INTEGER';
+          } else {
+            param.type = 'DOUBLE';
+          }
+          param.value = param.value.toString();
+          break;
+        case 'string':
+          param.type = 'STRING';
+          break;
+        default:
+          throw new HiveDriverError('Unsupported object type used for parameterized query.');
+      }
+    }
+  }
+}
+export default function convertToSparkParameters(values: any[]): TSparkParameter[] {
+  const params = convertToDBSQLParameters(values);
+  const retVal: TSparkParameter[] = [];
+  inferTypes(params);
+  for (const param of params) {
+    switch (typeof param.value) {
+      case 'string':
+        retVal.push(
+          new TSparkParameter({
+            name: param.name,
+            value: new TSparkParameterValue({ stringValue: param.value }),
+            type: param.type,
+          }),
+        );
+        break;
+      case 'undefined':
+        retVal.push(new TSparkParameter({ name: param.name, value: new TSparkParameterValue({}), type: param.type }));
+        break;
+      default:
+        // Cast to a string and then return param
+        retVal.push(
+          new TSparkParameter({
+            name: param.name,
+            value: new TSparkParameterValue({ stringValue: param.value.toString() }),
+            type: param.type,
+          }),
+        );
+    }
+  }
+  return retVal;
+}
diff --git a/tests/e2e/parameterized_query.test.js b/tests/e2e/parameterized_query.test.js
deleted file mode 100644
index d363b700..00000000
--- a/tests/e2e/parameterized_query.test.js
+++ /dev/null
@@ -1,53 +0,0 @@
-const { expect } = require('chai');
-const config = require('./utils/config');
-const logger = require('./utils/logger')(config.logger);
-
-const DBSQLClient = require('../../dist/DBSQLClient').default;
-const convertToSparkParameters = require('../../dist/utils/ParameterConverter').default;
-const { TSparkParameterValue, TSparkParameter } = require('../../thrift/TCLIService_types');
-const globalConfig = require('../../dist/globalConfig').default;
-
-describe('Parameterized query converter unit test', () => {
-  expect(convertToSparkParameters({ key: null })[0]).to.deep.eq(
-    new TSparkParameter({ name: 'key', type: 'VOID', value: new TSparkParameterValue() }),
-  );
-  expect(convertToSparkParameters({ key: 'value' })[0]).to.deep.eq(
-    new TSparkParameter({ name: 'key', type: 'STRING', value: new TSparkParameterValue({ stringValue: 'value' }) }),
-  );
-  expect(convertToSparkParameters({ key: 1 })[0]).to.deep.eq(
-    new TSparkParameter({ name: 'key', type: 'INT', value: new TSparkParameterValue({ doubleValue: 1 }) }),
-  );
-  expect(convertToSparkParameters({ key: 1.1 })[0]).to.deep.eq(
-    new TSparkParameter({ name: 'key', type: 'DOUBLE', value: new TSparkParameterValue({ doubleValue: 1.1 }) }),
-  );
-  expect(convertToSparkParameters({ key: true })[0]).to.deep.eq(
-    new TSparkParameter({ name: 'key', type: 'BOOLEAN', value: new TSparkParameterValue({ booleanValue: true }) }),
-  );
-});
-
-const openSession = async () => {
-  const client = new DBSQLClient();
-
-  const connection = await client.connect({
-    host: config.host,
-    path: config.path,
-    token: config.token,
-  });
-
-  return connection.openSession({
-    initialCatalog: config.database[0],
-    initialSchema: config.database[1],
-  });
-};
-
-describe('Parameterized Query', async () => {
-  it('should use default socket timeout', async () => {
-    const query = `
-    select * from default.stock_data where open > {{parameter}}
-    `;
-
-    let session = await openSession();
-
-    let result = await session.executeStatement(query, { parameters: 2 });
-  });
-});
diff --git a/tests/unit/parameterized_query.test.js b/tests/unit/parameterized_query.test.js
new file mode 100644
index 00000000..a37e1e4c
--- /dev/null
+++ b/tests/unit/parameterized_query.test.js
@@ -0,0 +1,37 @@
+const { expect } = require('chai');
+
+const DBSQLClient = require('../../dist/DBSQLClient').default;
+const convertToSparkParameters = require('../../dist/utils/convertToSparkParameters').default;
+const { TSparkParameterValue, TSparkParameter } = require('../../thrift/TCLIService_types');
+const { default: DBSQLParameter } = require('../../dist/DBSQLParameter');
+
+describe('Test Inference', () => {
+  it('should infer types correctly', () => {
+    let params = convertToSparkParameters([null, 'value', 1, 1.1, true]);
+    expect(params[0]).to.deep.eq(new TSparkParameter({ type: 'VOID', value: new TSparkParameterValue() }));
+    expect(params[1]).to.deep.eq(
+      new TSparkParameter({ type: 'STRING', value: new TSparkParameterValue({ stringValue: 'value' }) }),
+    );
+    expect(params[2]).to.deep.eq(
+      new TSparkParameter({ type: 'INTEGER', value: new TSparkParameterValue({ stringValue: '1' }) }),
+    );
+    expect(params[3]).to.deep.eq(
+      new TSparkParameter({ type: 'DOUBLE', value: new TSparkParameterValue({ stringValue: '1.1' }) }),
+    );
+    expect(params[4]).to.deep.eq(
+      new TSparkParameter({ type: 'BOOLEAN', value: new TSparkParameterValue({ stringValue: 'true' }) }),
+    );
+  });
+  it('should preserve name info', () => {
+    let params = convertToSparkParameters([
+      new DBSQLParameter({ name: '1', value: 26 }),
+      new DBSQLParameter({ name: '2', value: 6.2, type: 'DECIMAL' }),
+    ]);
+    expect(params[0]).to.deep.eq(
+      new TSparkParameter({ name: '1', type: 'INTEGER', value: new TSparkParameterValue({ stringValue: '26' }) }),
+    );
+    expect(params[1]).to.deep.eq(
+      new TSparkParameter({ name: '2', type: 'DECIMAL', value: new TSparkParameterValue({ stringValue: '6.2' }) }),
+    );
+  });
+});

From 7d90fb424510a9828bc3f5780546fe4b4724b7d0 Mon Sep 17 00:00:00 2001
From: Levko Kravets
Date: Tue, 5 Sep 2023 20:09:01 +0300
Subject: [PATCH 05/10] Revert irrelevant changes

Signed-off-by: Levko Kravets

---
 lib/DBSQLClient.ts                           | 2 +-
 lib/connection/connections/HttpConnection.ts | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/lib/DBSQLClient.ts b/lib/DBSQLClient.ts
index a4823cc0..41418791 100644
--- a/lib/DBSQLClient.ts
+++ b/lib/DBSQLClient.ts
@@ -1,6 +1,6 @@
-import { EventEmitter } from 'events';
 import thrift, { HttpHeaders } from 'thrift';
+import { EventEmitter } from 'events';
 
 import TCLIService from '../thrift/TCLIService';
 import { TProtocolVersion } from '../thrift/TCLIService_types';
 import IDBSQLClient, { ClientOptions, ConnectionOptions, OpenSessionRequest } from './contracts/IDBSQLClient';
diff --git a/lib/connection/connections/HttpConnection.ts b/lib/connection/connections/HttpConnection.ts
index 0cc19155..ec665ed5 100644
--- a/lib/connection/connections/HttpConnection.ts
+++ b/lib/connection/connections/HttpConnection.ts
@@ -1,6 +1,6 @@
+import thrift from 'thrift';
 import https from 'https';
 import http, { IncomingMessage } from 'http';
-import thrift from 'thrift';
 
 import IThriftConnection from '../contracts/IThriftConnection';
 import IConnectionProvider from '../contracts/IConnectionProvider';

From 6b28a0dde16d27e8b2d203966975854fe25959fa Mon Sep 17 00:00:00 2001
From: Levko Kravets
Date: Tue, 5 Sep 2023 21:22:35 +0300
Subject: [PATCH 06/10] Refactoring + improve tests

Signed-off-by: Levko Kravets

---
 lib/DBSQLParameter.ts                  | 62 ++++++++++++++++---
 lib/DBSQLSession.ts                    | 19 +++++-
 lib/contracts/IDBSQLSession.ts         |  2 +-
 lib/index.ts                           |  3 +-
 lib/utils/convertToSparkParameters.ts  | 85 --------------------------
 tests/e2e/query_parameters.test.js     | 58 ++++++++++++++++++
 tests/unit/DBSQLParameter.js           | 56 +++++++++++++++++
 tests/unit/parameterized_query.test.js | 37 -----------
 8 files changed, 187 insertions(+), 135 deletions(-)
 delete mode 100644 lib/utils/convertToSparkParameters.ts
 create mode 100644 tests/e2e/query_parameters.test.js
 create mode 100644 tests/unit/DBSQLParameter.js
 delete mode 100644 tests/unit/parameterized_query.test.js

diff --git a/lib/DBSQLParameter.ts b/lib/DBSQLParameter.ts
index 2ff636cc..bafea5fb 100644
--- a/lib/DBSQLParameter.ts
+++ b/lib/DBSQLParameter.ts
@@ -1,19 +1,63 @@
-interface ParameterInput {
-  name?: string;
+import Int64 from 'node-int64';
+import { TSparkParameter, TSparkParameterValue } from '../thrift/TCLIService_types';
+
+export type DBSQLParameterValue = undefined | null | boolean | number | bigint | Int64 | string;
+
+interface DBSQLParameterOptions {
   type?: string;
-  value?: any;
+  value?: DBSQLParameterValue;
 }
 
 export default class DBSQLParameter {
-  name?: string;
+  public readonly type?: string;
 
-  type?: string;
+  public readonly value: DBSQLParameterValue;
 
-  value?: any;
-
-  public constructor({ name, type, value }: ParameterInput) {
-    this.name = name;
+  constructor({ type, value }: DBSQLParameterOptions = {}) {
     this.type = type;
     this.value = value;
   }
+
+  public toSparkParameter(): TSparkParameter {
+    if (this.value === undefined || this.value === null) {
+      return new TSparkParameter({
+        type: this.type ?? 'VOID',
+        value: new TSparkParameterValue({}),
+      });
+    }
+
+    if (typeof this.value === 'boolean') {
+      return new TSparkParameter({
+        type: this.type ?? 'BOOLEAN',
+        value: new TSparkParameterValue({
+          stringValue: this.value ? 'TRUE' : 'FALSE',
+        }),
+      });
+    }
+
+    if (typeof this.value === 'number') {
+      return new TSparkParameter({
+        type: this.type ?? (Number.isInteger(this.value) ? 'INTEGER' : 'DOUBLE'),
+        value: new TSparkParameterValue({
+          stringValue: Number(this.value).toString(),
+        }),
+      });
+    }
+
+    if (this.value instanceof Int64 || typeof this.value === 'bigint') {
+      return new TSparkParameter({
+        type: this.type ?? 'BIGINT',
+        value: new TSparkParameterValue({
+          stringValue: this.value.toString(),
+        }),
+      });
+    }
+
+    return new TSparkParameter({
+      type: this.type ?? 'STRING',
+      value: new TSparkParameterValue({
+        stringValue: this.value,
+      }),
+    });
+  }
 }
diff --git a/lib/DBSQLSession.ts b/lib/DBSQLSession.ts
index ca097d75..b7859e03 100644
--- a/lib/DBSQLSession.ts
+++ b/lib/DBSQLSession.ts
@@ -5,6 +5,7 @@ import {
   TOperationHandle,
   TSparkDirectResults,
   TSparkArrowTypes,
+  TSparkParameter,
 } from '../thrift/TCLIService_types';
 import HiveDriver from './hive/HiveDriver';
 import { Int64 } from './hive/Types';
@@ -29,7 +30,7 @@ import CloseableCollection from './utils/CloseableCollection';
 import IDBSQLLogger, { LogLevel } from './contracts/IDBSQLLogger';
 import HiveDriverError from './errors/HiveDriverError';
 import globalConfig from './globalConfig';
-import convertToSparkParameters from './utils/convertToSparkParameters';
+import DBSQLParameter from './DBSQLParameter';
 
 const defaultMaxRows = 100000;
 
@@ -75,6 +76,20 @@ function getArrowOptions(): {
   };
 }
 
+function getQueryParameters(namedParameters?: Record<string, DBSQLParameter>): Array<TSparkParameter> {
+  const result: Array<TSparkParameter> = [];
+
+  if (namedParameters !== undefined) {
+    for (const name of Object.keys(namedParameters)) {
+      const param = namedParameters[name].toSparkParameter();
+      param.name = name;
+      result.push(param);
+    }
+  }
+
+  return result;
+}
+
 interface DBSQLSessionConstructorOptions {
   logger: IDBSQLLogger;
 }
@@ -141,7 +156,7 @@
       ...getDirectResultsOptions(options.maxRows),
       ...getArrowOptions(),
       canDownloadResult: options.useCloudFetch ?? globalConfig.useCloudFetch,
-      parameters: options.parameters ? convertToSparkParameters(options.parameters) : undefined,
+      parameters: getQueryParameters(options.namedParameters),
     });
     const response = await this.handleResponse(operationPromise);
     return this.createOperation(response);
diff --git a/lib/contracts/IDBSQLSession.ts b/lib/contracts/IDBSQLSession.ts
index c705f5d8..0adf6281 100644
--- a/lib/contracts/IDBSQLSession.ts
+++ b/lib/contracts/IDBSQLSession.ts
@@ -9,7 +9,7 @@ export type ExecuteStatementOptions = {
   runAsync?: boolean;
   maxRows?: number | null;
   useCloudFetch?: boolean;
-  parameters?: DBSQLParameter[];
+  namedParameters?: Record<string, DBSQLParameter>;
 };
 
 export type TypeInfoRequest = {
diff --git a/lib/index.ts b/lib/index.ts
index bc12120e..734a672c 100644
--- a/lib/index.ts
+++ b/lib/index.ts
@@ -3,6 +3,7 @@ import TCLIService from '../thrift/TCLIService';
 import TCLIService_types from '../thrift/TCLIService_types';
 import DBSQLClient from './DBSQLClient';
 import DBSQLSession from './DBSQLSession';
+import DBSQLParameter from './DBSQLParameter';
 import DBSQLLogger from './DBSQLLogger';
 import PlainHttpAuthentication from './connection/auth/PlainHttpAuthentication';
 import HttpConnection from './connection/connections/HttpConnection';
@@ -31,4 +32,4 @@ export const utils = {
   formatProgress,
 };
 
-export { DBSQLClient, DBSQLSession, DBSQLLogger, LogLevel };
+export { DBSQLClient, DBSQLSession, DBSQLParameter, DBSQLLogger, LogLevel };
diff --git a/lib/utils/convertToSparkParameters.ts b/lib/utils/convertToSparkParameters.ts
deleted file mode 100644
index 3a5e2f74..00000000
--- a/lib/utils/convertToSparkParameters.ts
+++ /dev/null
@@ -1,85 +0,0 @@
-import { TSparkParameter, TSparkParameterValue } from '../../thrift/TCLIService_types';
-import DBSQLParameter from '../DBSQLParameter';
-import HiveDriverError from '../errors/HiveDriverError';
-
-function convertToDBSQLParameters(values: any[]): DBSQLParameter[] {
-  const params: DBSQLParameter[] = [];
-  for (const value of values) {
-    switch (typeof value) {
-      case 'object':
-        if (value === null) {
-          params.push(new DBSQLParameter({}));
-          break;
-        }
-        if (value instanceof DBSQLParameter) {
-          params.push(value);
-          break;
-        }
-        throw new HiveDriverError('Unsupported object type used for parameterized query.');
-      default:
-        params.push(new DBSQLParameter({ value }));
-    }
-  }
-  return params;
-}
-// Possible inputs to the params array:
-// Naked args (will be converted to DBParameter)
-function inferTypes(params: DBSQLParameter[]): void {
-  for (const param of params) {
-    if (!param.type) {
-      switch (typeof param.value) {
-        case 'undefined':
-          param.type = 'VOID';
-          break;
-        case 'boolean':
-          param.type = 'BOOLEAN';
-          param.value = param.value.toString();
-          break;
-        case 'number':
-          if (Number.isInteger(param.value)) {
-            param.type = 'INTEGER';
-          } else {
-            param.type = 'DOUBLE';
-          }
-          param.value = param.value.toString();
-          break;
-        case 'string':
-          param.type = 'STRING';
-          break;
-        default:
-          throw new HiveDriverError('Unsupported object type used for parameterized query.');
-      }
-    }
-  }
-}
-export default function convertToSparkParameters(values: any[]): TSparkParameter[] {
-  const params = convertToDBSQLParameters(values);
-  const retVal: TSparkParameter[] = [];
-  inferTypes(params);
-  for (const param of params) {
-    switch (typeof param.value) {
-      case 'string':
-        retVal.push(
-          new TSparkParameter({
-            name: param.name,
-            value: new TSparkParameterValue({ stringValue: param.value }),
-            type: param.type,
-          }),
-        );
-        break;
-      case 'undefined':
-        retVal.push(new TSparkParameter({ name: param.name, value: new TSparkParameterValue({}), type: param.type }));
-        break;
-      default:
-        // Cast to a string and then return param
-        retVal.push(
-          new TSparkParameter({
-            name: param.name,
-            value: new TSparkParameterValue({ stringValue: param.value.toString() }),
-            type: param.type,
-          }),
-        );
-    }
-  }
-  return retVal;
-}
diff --git a/tests/e2e/query_parameters.test.js b/tests/e2e/query_parameters.test.js
new file mode 100644
index 00000000..c1cd7582
--- /dev/null
+++ b/tests/e2e/query_parameters.test.js
@@ -0,0 +1,58 @@
+const { expect } = require('chai');
+const Int64 = require('node-int64');
+const config = require('./utils/config');
+const { DBSQLClient, DBSQLParameter } = require('../..');
+
+const openSession = async () => {
+  const client = new DBSQLClient();
+
+  const connection = await client.connect({
+    host: config.host,
+    path: config.path,
+    token: config.token,
+  });
+
+  return connection.openSession({
+    initialCatalog: config.database[0],
+    initialSchema: config.database[1],
+  });
+};
+
+describe('Query parameters', () => {
+  it('should use named parameters', async () => {
+    const session = await openSession();
+    const operation = await session.executeStatement(
+      `
+      SELECT
+        :p_bool AS col_bool,
+        :p_int AS col_int,
+        :p_double AS col_double,
+        :p_bigint_1 AS col_bigint_1,
+        :p_bigint_2 AS col_bigint_2,
+        :p_str AS col_str
+    `,
+      {
+        runAsync: true,
+        namedParameters: {
+          p_bool: new DBSQLParameter({ value: true }),
+          p_int: new DBSQLParameter({ value: 1234 }),
+          p_double: new DBSQLParameter({ value: 3.14 }),
+          p_bigint_1: new DBSQLParameter({ value: BigInt(1234) }),
+          p_bigint_2: new DBSQLParameter({ value: new Int64(1234) }),
+          p_str: new DBSQLParameter({ value: 'Hello' }),
+        },
+      },
+    );
+    const result = await operation.fetchAll();
+    expect(result).to.deep.equal([
+      {
+        col_bool: true,
+        col_int: 1234,
+        col_double: 3.14,
+        col_bigint_1: 1234,
+        col_bigint_2: 1234,
+        col_str: 'Hello',
+      },
+    ]);
+  });
+});
diff --git a/tests/unit/DBSQLParameter.js b/tests/unit/DBSQLParameter.js
new file mode 100644
index 00000000..8d26acd4
--- /dev/null
+++ b/tests/unit/DBSQLParameter.js
@@ -0,0 +1,56 @@
+const { expect } = require('chai');
+
+const Int64 = require('node-int64');
+const { TSparkParameterValue, TSparkParameter } = require('../../thrift/TCLIService_types');
+const { default: DBSQLParameter } = require('../../dist/DBSQLParameter');
+
+describe('DBSQLParameter', () => {
+  it('should infer types correctly', () => {
+    const cases = [
+      [undefined, new TSparkParameter({ type: 'VOID', value: new TSparkParameterValue() })],
+      [null, new TSparkParameter({ type: 'VOID', value: new TSparkParameterValue() })],
+      [false, new TSparkParameter({ type: 'BOOLEAN', value: new TSparkParameterValue({ stringValue: 'FALSE' }) })],
+      [true, new TSparkParameter({ type: 'BOOLEAN', value: new TSparkParameterValue({ stringValue: 'TRUE' }) })],
+      [123, new TSparkParameter({ type: 'INTEGER', value: new TSparkParameterValue({ stringValue: '123' }) })],
+      [3.14, new TSparkParameter({ type: 'DOUBLE', value: new TSparkParameterValue({ stringValue: '3.14' }) })],
+      [BigInt(1234), new TSparkParameter({ type: 'BIGINT', value: new TSparkParameterValue({ stringValue: '1234' }) })],
+      [
+        new Int64(1234),
+        new TSparkParameter({ type: 'BIGINT', value: new TSparkParameterValue({ stringValue: '1234' }) }),
+      ],
+      ['Hello', new TSparkParameter({ type: 'STRING', value: new TSparkParameterValue({ stringValue: 'Hello' }) })],
+    ];
+
+    for (const [value, expectedParam] of cases) {
+      const dbsqlParam = new DBSQLParameter({ value });
+      expect(dbsqlParam.toSparkParameter()).to.deep.equal(expectedParam);
+    }
+  });
+
+  it('should use provided type', () => {
+    const expectedType = '_CUSTOM_TYPE_'; // it doesn't have to be valid type name, just any string
+
+    const cases = [
+      [undefined, new TSparkParameter({ type: expectedType, value: new TSparkParameterValue() })],
+      [null, new TSparkParameter({ type: expectedType, value: new TSparkParameterValue() })],
+      [false, new TSparkParameter({ type: expectedType, value: new TSparkParameterValue({ stringValue: 'FALSE' }) })],
+      [true, new TSparkParameter({ type: expectedType, value: new TSparkParameterValue({ stringValue: 'TRUE' }) })],
+      [123, new TSparkParameter({ type: expectedType, value: new TSparkParameterValue({ stringValue: '123' }) })],
+      [3.14, new TSparkParameter({ type: expectedType, value: new TSparkParameterValue({ stringValue: '3.14' }) })],
+      [
+        BigInt(1234),
+        new TSparkParameter({ type: expectedType, value: new TSparkParameterValue({ stringValue: '1234' }) }),
+      ],
+      [
+        new Int64(1234),
+        new TSparkParameter({ type: expectedType, value: new TSparkParameterValue({ stringValue: '1234' }) }),
+      ],
+      ['Hello', new TSparkParameter({ type: expectedType, value: new TSparkParameterValue({ stringValue: 'Hello' }) })],
+    ];
+
+    for (const [value, expectedParam] of cases) {
+      const dbsqlParam = new DBSQLParameter({ type: expectedType, value });
+      expect(dbsqlParam.toSparkParameter()).to.deep.equal(expectedParam);
+    }
+  });
+});
diff --git a/tests/unit/parameterized_query.test.js b/tests/unit/parameterized_query.test.js
deleted file mode 100644
index a37e1e4c..00000000
--- a/tests/unit/parameterized_query.test.js
+++ /dev/null
@@ -1,37 +0,0 @@
-const { expect } = require('chai');
-
-const DBSQLClient = require('../../dist/DBSQLClient').default;
-const convertToSparkParameters = require('../../dist/utils/convertToSparkParameters').default;
-const { TSparkParameterValue, TSparkParameter } = require('../../thrift/TCLIService_types');
-const { default: DBSQLParameter } = require('../../dist/DBSQLParameter');
-
-describe('Test Inference', () => {
-  it('should infer types correctly', () => {
-    let params = convertToSparkParameters([null, 'value', 1, 1.1, true]);
-    expect(params[0]).to.deep.eq(new TSparkParameter({ type: 'VOID', value: new TSparkParameterValue() }));
-    expect(params[1]).to.deep.eq(
-      new TSparkParameter({ type: 'STRING', value: new TSparkParameterValue({ stringValue: 'value' }) }),
-    );
-    expect(params[2]).to.deep.eq(
-      new TSparkParameter({ type: 'INTEGER', value: new TSparkParameterValue({ stringValue: '1' }) }),
-    );
-    expect(params[3]).to.deep.eq(
-      new TSparkParameter({ type: 'DOUBLE', value: new TSparkParameterValue({ stringValue: '1.1' }) }),
-    );
-    expect(params[4]).to.deep.eq(
-      new TSparkParameter({ type: 'BOOLEAN', value: new TSparkParameterValue({ stringValue: 'true' }) }),
-    );
-  });
-  it('should preserve name info', () => {
-    let params = convertToSparkParameters([
-      new DBSQLParameter({ name: '1', value: 26 }),
-      new DBSQLParameter({ name: '2', value: 6.2, type: 'DECIMAL' }),
-    ]);
-    expect(params[0]).to.deep.eq(
-      new TSparkParameter({ name: '1', type: 'INTEGER', value: new TSparkParameterValue({ stringValue: '26' }) }),
-    );
-    expect(params[1]).to.deep.eq(
-      new TSparkParameter({ name: '2', type: 'DECIMAL', value: new TSparkParameterValue({ stringValue: '6.2' }) }),
-    );
-  });
-});

From ea8e4152056026f0ddd6b814e37404f1e7f4b9ca Mon Sep 17 00:00:00 2001
From: nithinkdb
Date: Tue, 5 Sep 2023 16:50:00 -0700
Subject: [PATCH 07/10] Removed void pointers

---
 lib/DBSQLParameter.ts                                    | 5 +----
 tests/unit/{DBSQLParameter.js => DBSQLParameter.test.js} | 4 ----
 2 files changed, 1 insertion(+), 8 deletions(-)
 rename tests/unit/{DBSQLParameter.js => DBSQLParameter.test.js} (87%)

diff --git a/lib/DBSQLParameter.ts b/lib/DBSQLParameter.ts
index bafea5fb..f247ca26 100644
--- a/lib/DBSQLParameter.ts
+++ b/lib/DBSQLParameter.ts
@@ -20,10 +20,7 @@ export default class DBSQLParameter {
 
   public toSparkParameter(): TSparkParameter {
     if (this.value === undefined || this.value === null) {
-      return new TSparkParameter({
-        type: this.type ?? 'VOID',
-        value: new TSparkParameterValue({}),
-      });
+      throw new TypeError(`Null parameter values are not supported`);
     }
 
     if (typeof this.value === 'boolean') {
diff --git a/tests/unit/DBSQLParameter.js b/tests/unit/DBSQLParameter.test.js
similarity index 87%
rename from tests/unit/DBSQLParameter.js
rename to tests/unit/DBSQLParameter.test.js
index 8d26acd4..4b4eb625 100644
--- a/tests/unit/DBSQLParameter.js
+++ b/tests/unit/DBSQLParameter.test.js
@@ -7,8 +7,6 @@ const { default: DBSQLParameter } = require('../../dist/DBSQLParameter');
 describe('DBSQLParameter', () => {
   it('should infer types correctly', () => {
     const cases = [
-      [undefined, new TSparkParameter({ type: 'VOID', value: new TSparkParameterValue() })],
-      [null, new TSparkParameter({ type: 'VOID', value: new TSparkParameterValue() })],
       [false, new TSparkParameter({ type: 'BOOLEAN', value: new TSparkParameterValue({ stringValue: 'FALSE' }) })],
       [true, new TSparkParameter({ type: 'BOOLEAN', value: new TSparkParameterValue({ stringValue: 'TRUE' }) })],
       [123, new TSparkParameter({ type: 'INTEGER', value: new TSparkParameterValue({ stringValue: '123' }) })],
@@ -29,8 +27,6 @@ describe('DBSQLParameter', () => {
     const expectedType = '_CUSTOM_TYPE_'; // it doesn't have to be valid type name, just any string
 
     const cases = [
-      [undefined, new TSparkParameter({ type: expectedType, value: new TSparkParameterValue() })],
-      [null, new TSparkParameter({ type: expectedType, value: new TSparkParameterValue() })],
      [false, new TSparkParameter({ type: expectedType, value: new TSparkParameterValue({ stringValue: 'FALSE' }) })],
      [true, new TSparkParameter({ type: expectedType, value: new TSparkParameterValue({ stringValue: 'TRUE' }) })],
      [123, new TSparkParameter({ type: expectedType, value: new TSparkParameterValue({ stringValue: '123' }) })],

From 4d6de9dd6418afee615c311c36417e2269f12f5f Mon Sep 17 00:00:00 2001
From: Levko Kravets
Date: Wed, 6 Sep 2023 06:00:37 +0300
Subject: [PATCH 08/10] Remove leftovers from removing VOID

Signed-off-by: Levko Kravets

---
 lib/DBSQLParameter.ts | 10 +++-------
 1 file changed, 3 insertions(+), 7 deletions(-)

diff --git a/lib/DBSQLParameter.ts b/lib/DBSQLParameter.ts
index f247ca26..309292bd 100644
--- a/lib/DBSQLParameter.ts
+++ b/lib/DBSQLParameter.ts
@@ -1,11 +1,11 @@
 import Int64 from 'node-int64';
 import { TSparkParameter, TSparkParameterValue } from '../thrift/TCLIService_types';
 
-export type DBSQLParameterValue = undefined | null | boolean | number | bigint | Int64 | string;
+export type DBSQLParameterValue = boolean | number | bigint | Int64 | string;
 
 interface DBSQLParameterOptions {
   type?: string;
-  value?: DBSQLParameterValue;
+  value: DBSQLParameterValue;
 }
 
 export default class DBSQLParameter {
@@ -13,16 +13,12 @@ export default class DBSQLParameter {
 
   public readonly value: DBSQLParameterValue;
 
-  constructor({ type, value }: DBSQLParameterOptions = {}) {
+  constructor({ type, value }: DBSQLParameterOptions) {
     this.type = type;
     this.value = value;
   }
 
   public toSparkParameter(): TSparkParameter {
-    if (this.value === undefined || this.value === null) {
-      throw new TypeError(`Null parameter values are not supported`);
-    }
-
     if (typeof this.value === 'boolean') {
       return new TSparkParameter({
         type: this.type ?? 'BOOLEAN',

From a42692860965253e6c8e7cba13e705dff25c2a0a Mon Sep 17 00:00:00 2001
From: Levko Kravets
Date: Wed, 6 Sep 2023 06:23:36 +0300
Subject: [PATCH 09/10] Add Date/Time parameters support

Signed-off-by: Levko Kravets

---
 lib/DBSQLParameter.ts              | 11 ++++++++++-
 tests/e2e/query_parameters.test.js | 22 ++++++++++++++--------
 tests/unit/DBSQLParameter.test.js  | 14 ++++++++++++++
 3 files changed, 38 insertions(+), 9 deletions(-)

diff --git a/lib/DBSQLParameter.ts b/lib/DBSQLParameter.ts
index 309292bd..93c1839e 100644
--- a/lib/DBSQLParameter.ts
+++ b/lib/DBSQLParameter.ts
@@ -1,7 +1,7 @@
 import Int64 from 'node-int64';
 import { TSparkParameter, TSparkParameterValue } from '../thrift/TCLIService_types';
 
-export type DBSQLParameterValue = boolean | number | bigint | Int64 | string;
+export type DBSQLParameterValue = boolean | number | bigint | Int64 | Date | string;
 
 interface DBSQLParameterOptions {
   type?: string;
@@ -46,6 +46,15 @@ export default class DBSQLParameter {
       });
     }
 
+    if (this.value instanceof Date) {
+      return new TSparkParameter({
+        type: this.type ?? 'TIMESTAMP',
+        value: new TSparkParameterValue({
+          stringValue: this.value.toISOString(),
+        }),
+      });
+    }
+
     return new TSparkParameter({
       type: this.type ?? 'STRING',
       value: new TSparkParameterValue({
diff --git a/tests/e2e/query_parameters.test.js b/tests/e2e/query_parameters.test.js
index c1cd7582..e169ea6a 100644
--- a/tests/e2e/query_parameters.test.js
+++ b/tests/e2e/query_parameters.test.js
@@ -23,14 +23,16 @@ describe('Query parameters', () => {
     const session = await openSession();
     const operation = await session.executeStatement(
       `
-      SELECT
-        :p_bool AS col_bool,
-        :p_int AS col_int,
-        :p_double AS col_double,
-        :p_bigint_1 AS col_bigint_1,
-        :p_bigint_2 AS col_bigint_2,
-        :p_str AS col_str
-    `,
+        SELECT
+          :p_bool AS col_bool,
+          :p_int AS col_int,
+          :p_double AS col_double,
+          :p_bigint_1 AS col_bigint_1,
+          :p_bigint_2 AS col_bigint_2,
+          :p_date as col_date,
+          :p_timestamp as col_timestamp,
+          :p_str AS col_str
+      `,
       {
         runAsync: true,
         namedParameters: {
@@ -39,6 +41,8 @@
         p_double: new DBSQLParameter({ value: 3.14 }),
         p_bigint_1: new DBSQLParameter({ value: BigInt(1234) }),
         p_bigint_2: new DBSQLParameter({ value: new Int64(1234) }),
+        p_date: new DBSQLParameter({ value: new Date('2023-09-06T03:14:27.843Z'), type: 'DATE' }),
+        p_timestamp: new DBSQLParameter({ value: new Date('2023-09-06T03:14:27.843Z') }),
         p_str: new DBSQLParameter({ value: 'Hello' }),
       },
     },
@@ -51,6 +55,8 @@
         col_double: 3.14,
         col_bigint_1: 1234,
         col_bigint_2: 1234,
+        col_date: new Date('2023-09-06T00:00:00.000Z'),
+        col_timestamp: new Date('2023-09-06T03:14:27.843Z'),
         col_str: 'Hello',
       },
     ]);
diff --git a/tests/unit/DBSQLParameter.test.js b/tests/unit/DBSQLParameter.test.js
index 4b4eb625..1817644c 100644
--- a/tests/unit/DBSQLParameter.test.js
+++ b/tests/unit/DBSQLParameter.test.js
@@ -16,6 +16,13 @@
       new Int64(1234),
       new TSparkParameter({ type: 'BIGINT', value: new TSparkParameterValue({ stringValue: '1234' }) }),
     ],
+    [
+      new Date('2023-09-06T03:14:27.843Z'),
+      new TSparkParameter({
+        type: 'TIMESTAMP',
+        value: new TSparkParameterValue({ stringValue: '2023-09-06T03:14:27.843Z' }),
+      }),
+    ],
     ['Hello', new TSparkParameter({ type: 'STRING', value: new TSparkParameterValue({ stringValue: 'Hello' }) })],
   ];
 
@@ -41,6 +48,13 @@
       new Int64(1234),
       new TSparkParameter({ type: expectedType, value: new TSparkParameterValue({ stringValue: '1234' }) }),
     ],
+    [
+      new Date('2023-09-06T03:14:27.843Z'),
+      new TSparkParameter({
+        type: expectedType,
+        value: new TSparkParameterValue({ stringValue: '2023-09-06T03:14:27.843Z' }),
+      }),
+    ],
     ['Hello', new TSparkParameter({ type: expectedType, value: new TSparkParameterValue({ stringValue: 'Hello' }) })],
   ];
 

From 9d2fbe68be5159d61baea7e1d0792a15062e92a6 Mon Sep 17 00:00:00 2001
From: Levko Kravets
Date: Wed, 6 Sep 2023 19:59:36 +0300
Subject: [PATCH 10/10] Support primitive values as named parameters

Signed-off-by: Levko Kravets

---
 lib/DBSQLSession.ts                | 14 +++++++++-----
 lib/contracts/IDBSQLSession.ts     |  4 ++--
 tests/e2e/query_parameters.test.js | 40 ++++++++++++++++++++++++++++++
 3 files changed, 51 insertions(+), 7 deletions(-)

diff --git a/lib/DBSQLSession.ts b/lib/DBSQLSession.ts
index b7859e03..d6631a52 100644
--- a/lib/DBSQLSession.ts
+++ b/lib/DBSQLSession.ts
@@ -30,7 +30,7 @@ import CloseableCollection from './utils/CloseableCollection';
 import IDBSQLLogger, { LogLevel } from './contracts/IDBSQLLogger';
 import HiveDriverError from './errors/HiveDriverError';
 import globalConfig from './globalConfig';
-import DBSQLParameter from './DBSQLParameter';
+import DBSQLParameter, { DBSQLParameterValue } from './DBSQLParameter';
 
 const defaultMaxRows = 100000;
 
@@ -76,14 +76,18 @@ function getArrowOptions(): {
   };
 }
 
-function getQueryParameters(namedParameters?: Record<string, DBSQLParameter>): Array<TSparkParameter> {
+function getQueryParameters(
+  namedParameters?: Record<string, DBSQLParameter | DBSQLParameterValue>,
+): Array<TSparkParameter> {
   const result: Array<TSparkParameter> = [];
 
   if (namedParameters !== undefined) {
     for (const name of Object.keys(namedParameters)) {
-      const param = namedParameters[name].toSparkParameter();
-      param.name = name;
-      result.push(param);
+      const value = namedParameters[name];
+      const param = value instanceof DBSQLParameter ? value : new DBSQLParameter({ value });
+      const sparkParam = param.toSparkParameter();
+      sparkParam.name = name;
+      result.push(sparkParam);
     }
   }
 
   return result;
diff --git a/lib/contracts/IDBSQLSession.ts b/lib/contracts/IDBSQLSession.ts
index 0adf6281..db490040 100644
--- a/lib/contracts/IDBSQLSession.ts
+++ b/lib/contracts/IDBSQLSession.ts
@@ -2,14 +2,14 @@ import IOperation from './IOperation';
 import Status from '../dto/Status';
 import InfoValue from '../dto/InfoValue';
 import { Int64 } from '../hive/Types';
-import DBSQLParameter from '../DBSQLParameter';
+import DBSQLParameter, { DBSQLParameterValue } from '../DBSQLParameter';
 
 export type ExecuteStatementOptions = {
   queryTimeout?: Int64;
   runAsync?: boolean;
   maxRows?: number | null;
   useCloudFetch?: boolean;
-  namedParameters?: Record<string, DBSQLParameter>;
+  namedParameters?: Record<string, DBSQLParameter | DBSQLParameterValue>;
 };
 
 export type TypeInfoRequest = {
diff --git a/tests/e2e/query_parameters.test.js b/tests/e2e/query_parameters.test.js
index e169ea6a..292bc182 100644
--- a/tests/e2e/query_parameters.test.js
+++ b/tests/e2e/query_parameters.test.js
@@ -61,4 +61,44 @@ describe('Query parameters', () => {
       },
     ]);
   });
+
+  it('should accept primitives as values for named parameters', async () => {
+    const session = await openSession();
+    const operation = await session.executeStatement(
+      `
+        SELECT
+          :p_bool AS col_bool,
+          :p_int AS col_int,
+          :p_double AS col_double,
+          :p_bigint_1 AS col_bigint_1,
+          :p_bigint_2 AS col_bigint_2,
+          :p_timestamp as col_timestamp,
+          :p_str AS col_str
+      `,
+      {
+        runAsync: true,
+        namedParameters: {
+          p_bool: true,
+          p_int: 1234,
+          p_double: 3.14,
+          p_bigint_1: BigInt(1234),
+          p_bigint_2: new Int64(1234),
+          p_timestamp: new Date('2023-09-06T03:14:27.843Z'),
+          p_str: 'Hello',
+        },
+      },
+    );
+    const result = await operation.fetchAll();
+    expect(result).to.deep.equal([
+      {
+        col_bool: true,
+        col_int: 1234,
+        col_double: 3.14,
+        col_bigint_1: 1234,
+        col_bigint_2: 1234,
+        col_timestamp: new Date('2023-09-06T03:14:27.843Z'),
+        col_str: 'Hello',
+      },
+    ]);
+  });
 });
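
Usage note: after patch 10, `namedParameters` accepts either DBSQLParameter instances
or bare primitive values, which the session wraps into DBSQLParameter internally. A
minimal sketch of the resulting API, assuming the `@databricks/sql` package built from
this branch; the host/path/token values are placeholders, and the query mirrors the
e2e tests above:

    const { DBSQLClient, DBSQLParameter } = require('@databricks/sql');

    async function main() {
      const client = new DBSQLClient();
      // Placeholders: substitute a real workspace host, warehouse HTTP path and token.
      const connection = await client.connect({ host: '...', path: '...', token: '...' });
      const session = await connection.openSession();

      const operation = await session.executeStatement(
        'SELECT :p_int AS col_int, :p_date AS col_date',
        {
          runAsync: true,
          namedParameters: {
            p_int: 1234, // bare primitive; inferred as INTEGER (patch 10)
            // Explicit type pin: Date values default to TIMESTAMP unless a type is given (patch 09)
            p_date: new DBSQLParameter({ value: new Date('2023-09-06T03:14:27.843Z'), type: 'DATE' }),
          },
        },
      );

      const result = await operation.fetchAll();
      console.log(result);

      await operation.close();
      await session.close();
      await client.close();
    }

    main();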