diff --git a/lib/DBSQLOperation.ts b/lib/DBSQLOperation.ts
index 876b3438..60d23db8 100644
--- a/lib/DBSQLOperation.ts
+++ b/lib/DBSQLOperation.ts
@@ -372,20 +372,20 @@ export default class DBSQLOperation implements IOperation {
 
     switch (resultFormat) {
       case TSparkRowSetType.COLUMN_BASED_SET:
-        resultSource = new JsonResultHandler(this.context, this._data, metadata.schema);
+        resultSource = new JsonResultHandler(this.context, this._data, metadata);
         break;
       case TSparkRowSetType.ARROW_BASED_SET:
         resultSource = new ArrowResultConverter(
           this.context,
-          new ArrowResultHandler(this.context, this._data, metadata.arrowSchema, metadata.lz4Compressed),
-          metadata.schema,
+          new ArrowResultHandler(this.context, this._data, metadata),
+          metadata,
         );
         break;
       case TSparkRowSetType.URL_BASED_SET:
         resultSource = new ArrowResultConverter(
           this.context,
-          new CloudFetchResultHandler(this.context, this._data, metadata.lz4Compressed),
-          metadata.schema,
+          new CloudFetchResultHandler(this.context, this._data, metadata),
+          metadata,
         );
         break;
       // no default
diff --git a/lib/result/ArrowResultConverter.ts b/lib/result/ArrowResultConverter.ts
index 1235b2b9..e1ada6f1 100644
--- a/lib/result/ArrowResultConverter.ts
+++ b/lib/result/ArrowResultConverter.ts
@@ -13,7 +13,7 @@ import {
   RecordBatchReader,
   util as arrowUtils,
 } from 'apache-arrow';
-import { TTableSchema, TColumnDesc } from '../../thrift/TCLIService_types';
+import { TGetResultSetMetadataResp, TColumnDesc } from '../../thrift/TCLIService_types';
 import IClientContext from '../contracts/IClientContext';
 import IResultsProvider, { ResultsProviderFetchNextOptions } from './IResultsProvider';
 import { getSchemaColumns, convertThriftValue } from './utils';
@@ -34,7 +34,7 @@ export default class ArrowResultConverter implements IResultsProvider<Array<any>> {
 
   private pendingRecordBatch?: RecordBatch;
 
-  constructor(context: IClientContext, source: IResultsProvider<Array<Buffer>>, schema?: TTableSchema) {
+  constructor(context: IClientContext, source: IResultsProvider<Array<Buffer>>, { schema }: TGetResultSetMetadataResp) {
     this.context = context;
     this.source = source;
     this.schema = getSchemaColumns(schema);
diff --git a/lib/result/ArrowResultHandler.ts b/lib/result/ArrowResultHandler.ts
index 52e2aafa..2b9a3238 100644
--- a/lib/result/ArrowResultHandler.ts
+++ b/lib/result/ArrowResultHandler.ts
@@ -1,7 +1,8 @@
 import LZ4 from 'lz4';
-import { TRowSet } from '../../thrift/TCLIService_types';
+import { TGetResultSetMetadataResp, TRowSet } from '../../thrift/TCLIService_types';
 import IClientContext from '../contracts/IClientContext';
 import IResultsProvider, { ResultsProviderFetchNextOptions } from './IResultsProvider';
+import { hiveSchemaToArrowSchema } from './utils';
 
 export default class ArrowResultHandler implements IResultsProvider<Array<Buffer>> {
   protected readonly context: IClientContext;
@@ -15,13 +16,14 @@
   constructor(
     context: IClientContext,
     source: IResultsProvider<TRowSet | undefined>,
-    arrowSchema?: Buffer,
-    isLZ4Compressed?: boolean,
+    { schema, arrowSchema, lz4Compressed }: TGetResultSetMetadataResp,
   ) {
     this.context = context;
     this.source = source;
-    this.arrowSchema = arrowSchema;
-    this.isLZ4Compressed = isLZ4Compressed ?? false;
+    // Arrow schema is not available in old DBR versions, which also don't support native Arrow types,
+    // so it's possible to infer the Arrow schema from the Hive schema, ignoring the `useArrowNativeTypes` option
+    this.arrowSchema = arrowSchema ?? hiveSchemaToArrowSchema(schema);
+    this.isLZ4Compressed = lz4Compressed ?? false;
   }
 
   public async hasMore() {
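With this refactoring, every result handler takes the whole `TGetResultSetMetadataResp` and destructures just the fields it needs, so `DBSQLOperation` can hand `metadata` through unchanged instead of picking fields out at each call site. A minimal sketch of the new constructor shape (`ExampleHandler` is hypothetical, not part of the library):

```typescript
import type { TGetResultSetMetadataResp } from '../thrift/TCLIService_types';

// Hypothetical handler showing the single-metadata-parameter convention:
// destructure only the fields this handler cares about, and default the
// optional Thrift fields at the point of use.
class ExampleHandler {
  private readonly isLZ4Compressed: boolean;

  constructor({ lz4Compressed }: TGetResultSetMetadataResp) {
    // `lz4Compressed` is optional on the Thrift response, hence the default
    this.isLZ4Compressed = lz4Compressed ?? false;
  }
}
```

One upside of this shape is that a new metadata-driven option later only touches the handler that consumes it, not every constructor call.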
diff --git a/lib/result/CloudFetchResultHandler.ts b/lib/result/CloudFetchResultHandler.ts
index f6b40164..e35cbc6f 100644
--- a/lib/result/CloudFetchResultHandler.ts
+++ b/lib/result/CloudFetchResultHandler.ts
@@ -1,6 +1,6 @@
 import LZ4 from 'lz4';
 import fetch, { RequestInfo, RequestInit } from 'node-fetch';
-import { TRowSet, TSparkArrowResultLink } from '../../thrift/TCLIService_types';
+import { TGetResultSetMetadataResp, TRowSet, TSparkArrowResultLink } from '../../thrift/TCLIService_types';
 import IClientContext from '../contracts/IClientContext';
 import IResultsProvider, { ResultsProviderFetchNextOptions } from './IResultsProvider';
 
@@ -15,10 +15,14 @@ export default class CloudFetchResultHandler implements IResultsProvider<Array<Buffer>> {
 
   private downloadTasks: Array<Promise<Buffer>> = [];
 
-  constructor(context: IClientContext, source: IResultsProvider<TRowSet | undefined>, isLZ4Compressed?: boolean) {
+  constructor(
+    context: IClientContext,
+    source: IResultsProvider<TRowSet | undefined>,
+    { lz4Compressed }: TGetResultSetMetadataResp,
+  ) {
     this.context = context;
     this.source = source;
-    this.isLZ4Compressed = isLZ4Compressed ?? false;
+    this.isLZ4Compressed = lz4Compressed ?? false;
   }
 
   public async hasMore() {
diff --git a/lib/result/JsonResultHandler.ts b/lib/result/JsonResultHandler.ts
index bcc07e77..02a084af 100644
--- a/lib/result/JsonResultHandler.ts
+++ b/lib/result/JsonResultHandler.ts
@@ -1,5 +1,5 @@
 import { ColumnCode } from '../hive/Types';
-import { TRowSet, TTableSchema, TColumn, TColumnDesc } from '../../thrift/TCLIService_types';
+import { TGetResultSetMetadataResp, TRowSet, TColumn, TColumnDesc } from '../../thrift/TCLIService_types';
 import IClientContext from '../contracts/IClientContext';
 import IResultsProvider, { ResultsProviderFetchNextOptions } from './IResultsProvider';
 import { getSchemaColumns, convertThriftValue } from './utils';
@@ -11,7 +11,11 @@ export default class JsonResultHandler implements IResultsProvider<Array<any>> {
   private readonly schema: Array<TColumnDesc>;
 
-  constructor(context: IClientContext, source: IResultsProvider<TRowSet | undefined>, schema?: TTableSchema) {
+  constructor(
+    context: IClientContext,
+    source: IResultsProvider<TRowSet | undefined>,
+    { schema }: TGetResultSetMetadataResp,
+  ) {
     this.context = context;
     this.source = source;
     this.schema = getSchemaColumns(schema);
   }
diff --git a/lib/result/ResultSlicer.ts b/lib/result/ResultSlicer.ts
index 0f640a9a..11a2a15f 100644
--- a/lib/result/ResultSlicer.ts
+++ b/lib/result/ResultSlicer.ts
@@ -52,11 +52,13 @@ export default class ResultSlicer<T> implements IResultsProvider<Array<T>> {
     // Fetch items from source results provider until we reach a requested count
     while (resultsCount < options.limit) {
       // eslint-disable-next-line no-await-in-loop
-      const chunk = await this.source.fetchNext(options);
-      if (chunk.length === 0) {
+      const hasMore = await this.source.hasMore();
+      if (!hasMore) {
         break;
       }
 
+      // eslint-disable-next-line no-await-in-loop
+      const chunk = await this.source.fetchNext(options);
       result.push(chunk);
       resultsCount += chunk.length;
     }
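Note the change in the `ResultSlicer` fetch loop: an empty chunk is no longer treated as end-of-data, because a source may legitimately return an empty array (for example, a row set that decodes to zero Arrow batches) while still having more data to serve. Exhaustion is now signalled only by `hasMore()`. A standalone sketch of that contract (the `drain` helper is illustrative, not library code):

```typescript
import IResultsProvider from './IResultsProvider';

// Illustrative helper: accumulate up to `limit` items from a provider.
// `chunk.length === 0` is deliberately NOT a termination condition;
// only `hasMore()` decides whether the source is exhausted.
async function drain<T>(source: IResultsProvider<Array<T>>, limit: number): Promise<Array<T>> {
  const result: Array<T> = [];
  while (result.length < limit) {
    // eslint-disable-next-line no-await-in-loop
    if (!(await source.hasMore())) {
      break;
    }
    // eslint-disable-next-line no-await-in-loop
    const chunk = await source.fetchNext({ limit: limit - result.length });
    result.push(...chunk);
  }
  return result;
}
```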
diff --git a/lib/result/utils.ts b/lib/result/utils.ts
index b4351df0..25e4d067 100644
--- a/lib/result/utils.ts
+++ b/lib/result/utils.ts
@@ -1,5 +1,23 @@
 import Int64 from 'node-int64';
+import {
+  Schema,
+  Field,
+  DataType,
+  Bool as ArrowBool,
+  Int8 as ArrowInt8,
+  Int16 as ArrowInt16,
+  Int32 as ArrowInt32,
+  Int64 as ArrowInt64,
+  Float32 as ArrowFloat32,
+  Float64 as ArrowFloat64,
+  Utf8 as ArrowString,
+  Date_ as ArrowDate,
+  Binary as ArrowBinary,
+  DateUnit,
+  RecordBatchWriter,
+} from 'apache-arrow';
 import { TTableSchema, TColumnDesc, TPrimitiveTypeEntry, TTypeId } from '../../thrift/TCLIService_types';
+import HiveDriverError from '../errors/HiveDriverError';
 
 export function getSchemaColumns(schema?: TTableSchema): Array<TColumnDesc> {
   if (!schema) {
@@ -73,3 +91,52 @@ export function convertThriftValue(typeDescriptor: TPrimitiveTypeEntry | undefined, value: any): any {
       return value;
   }
 }
+
+// This type map corresponds to Arrow without native types support (most complex types are serialized as strings)
+const hiveTypeToArrowType: Record<TTypeId, DataType | null> = {
+  [TTypeId.BOOLEAN_TYPE]: new ArrowBool(),
+  [TTypeId.TINYINT_TYPE]: new ArrowInt8(),
+  [TTypeId.SMALLINT_TYPE]: new ArrowInt16(),
+  [TTypeId.INT_TYPE]: new ArrowInt32(),
+  [TTypeId.BIGINT_TYPE]: new ArrowInt64(),
+  [TTypeId.FLOAT_TYPE]: new ArrowFloat32(),
+  [TTypeId.DOUBLE_TYPE]: new ArrowFloat64(),
+  [TTypeId.STRING_TYPE]: new ArrowString(),
+  [TTypeId.TIMESTAMP_TYPE]: new ArrowString(),
+  [TTypeId.BINARY_TYPE]: new ArrowBinary(),
+  [TTypeId.ARRAY_TYPE]: new ArrowString(),
+  [TTypeId.MAP_TYPE]: new ArrowString(),
+  [TTypeId.STRUCT_TYPE]: new ArrowString(),
+  [TTypeId.UNION_TYPE]: new ArrowString(),
+  [TTypeId.USER_DEFINED_TYPE]: new ArrowString(),
+  [TTypeId.DECIMAL_TYPE]: new ArrowString(),
+  [TTypeId.NULL_TYPE]: null,
+  [TTypeId.DATE_TYPE]: new ArrowDate(DateUnit.DAY),
+  [TTypeId.VARCHAR_TYPE]: new ArrowString(),
+  [TTypeId.CHAR_TYPE]: new ArrowString(),
+  [TTypeId.INTERVAL_YEAR_MONTH_TYPE]: new ArrowString(),
+  [TTypeId.INTERVAL_DAY_TIME_TYPE]: new ArrowString(),
+};
+
+export function hiveSchemaToArrowSchema(schema?: TTableSchema): Buffer | undefined {
+  if (!schema) {
+    return undefined;
+  }
+
+  const columns = getSchemaColumns(schema);
+
+  const arrowFields = columns.map((column) => {
+    const hiveType = column.typeDesc.types[0].primitiveEntry?.type ?? undefined;
+    const arrowType = hiveType !== undefined ? hiveTypeToArrowType[hiveType] : undefined;
+    if (!arrowType) {
+      throw new HiveDriverError(`Unsupported column type: ${hiveType ? TTypeId[hiveType] : 'undefined'}`);
+    }
+    return new Field(column.columnName, arrowType, true);
+  });
+
+  const arrowSchema = new Schema(arrowFields);
+  const writer = new RecordBatchWriter();
+  writer.reset(undefined, arrowSchema);
+  writer.finish();
+  return Buffer.from(writer.toUint8Array(true));
+}
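`hiveSchemaToArrowSchema` leans on the Arrow IPC stream format: a stream begins with a schema message, so resetting a `RecordBatchWriter` with a schema and finishing without writing any batches yields a buffer containing the serialized schema alone, which is exactly the shape `ArrowResultHandler` expects to prepend to incoming record batches. A usage sketch (the single-column Thrift schema is made up, and reading the buffer back via `tableFromIPC` assumes a reasonably recent `apache-arrow`):

```typescript
import { tableFromIPC } from 'apache-arrow';
import { TTypeId } from '../../thrift/TCLIService_types';
import { hiveSchemaToArrowSchema } from './utils';

// A made-up Hive schema with one INT column named `id`
const thriftSchema = {
  columns: [
    {
      columnName: 'id',
      typeDesc: { types: [{ primitiveEntry: { type: TTypeId.INT_TYPE } }] },
      position: 1,
    },
  ],
};

const schemaBuffer = hiveSchemaToArrowSchema(thriftSchema);

// The buffer is a schema-only IPC stream: parsing it yields an empty table
// whose schema carries the inferred Arrow fields.
const table = tableFromIPC(schemaBuffer!);
console.log(table.numRows); // 0
console.log(table.schema.fields.map((f) => `${f.name}: ${f.type}`)); // [ 'id: Int32' ]
```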
diff --git a/tests/unit/result/ArrowResultConverter.test.js b/tests/unit/result/ArrowResultConverter.test.js
index 3ff87a15..2cf4949f 100644
--- a/tests/unit/result/ArrowResultConverter.test.js
+++ b/tests/unit/result/ArrowResultConverter.test.js
@@ -57,14 +57,14 @@ describe('ArrowResultHandler', () => {
   it('should convert data', async () => {
     const context = {};
     const rowSetProvider = new ResultsProviderMock([sampleArrowBatch]);
-    const result = new ArrowResultConverter(context, rowSetProvider, sampleThriftSchema);
+    const result = new ArrowResultConverter(context, rowSetProvider, { schema: sampleThriftSchema });
     expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([{ 1: 1 }]);
   });
 
   it('should return empty array if no data to process', async () => {
     const context = {};
     const rowSetProvider = new ResultsProviderMock([], []);
-    const result = new ArrowResultConverter(context, rowSetProvider, sampleThriftSchema);
+    const result = new ArrowResultConverter(context, rowSetProvider, { schema: sampleThriftSchema });
     expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([]);
     expect(await result.hasMore()).to.be.false;
   });
@@ -72,7 +72,7 @@ describe('ArrowResultHandler', () => {
   it('should return empty array if no schema available', async () => {
     const context = {};
     const rowSetProvider = new ResultsProviderMock([sampleArrowBatch]);
-    const result = new ArrowResultConverter(context, rowSetProvider);
+    const result = new ArrowResultConverter(context, rowSetProvider, {});
     expect(await result.hasMore()).to.be.false;
     expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([]);
   });
@@ -80,7 +80,7 @@ describe('ArrowResultHandler', () => {
   it('should detect nulls', async () => {
     const context = {};
     const rowSetProvider = new ResultsProviderMock([arrowBatchAllNulls]);
-    const result = new ArrowResultConverter(context, rowSetProvider, thriftSchemaAllNulls);
+    const result = new ArrowResultConverter(context, rowSetProvider, { schema: thriftSchemaAllNulls });
     expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([
       {
         boolean_field: null,
diff --git a/tests/unit/result/ArrowResultHandler.test.js b/tests/unit/result/ArrowResultHandler.test.js
index eb70544b..74bf37c3 100644
--- a/tests/unit/result/ArrowResultHandler.test.js
+++ b/tests/unit/result/ArrowResultHandler.test.js
@@ -61,7 +61,7 @@ describe('ArrowResultHandler', () => {
   it('should return data', async () => {
     const context = {};
     const rowSetProvider = new ResultsProviderMock([sampleRowSet1]);
-    const result = new ArrowResultHandler(context, rowSetProvider, sampleArrowSchema);
+    const result = new ArrowResultHandler(context, rowSetProvider, { arrowSchema: sampleArrowSchema });
 
     const batches = await result.fetchNext({ limit: 10000 });
     expect(await rowSetProvider.hasMore()).to.be.false;
@@ -74,7 +74,10 @@
   it('should handle LZ4 compressed data', async () => {
     const context = {};
     const rowSetProvider = new ResultsProviderMock([sampleRowSet1LZ4Compressed]);
-    const result = new ArrowResultHandler(context, rowSetProvider, sampleArrowSchema, true);
+    const result = new ArrowResultHandler(context, rowSetProvider, {
+      arrowSchema: sampleArrowSchema,
+      lz4Compressed: true,
+    });
 
     const batches = await result.fetchNext({ limit: 10000 });
     expect(await rowSetProvider.hasMore()).to.be.false;
@@ -87,7 +90,7 @@
   it('should not buffer any data', async () => {
     const context = {};
     const rowSetProvider = new ResultsProviderMock([sampleRowSet1]);
-    const result = new ArrowResultHandler(context, rowSetProvider, sampleArrowSchema);
+    const result = new ArrowResultHandler(context, rowSetProvider, { arrowSchema: sampleArrowSchema });
 
     expect(await rowSetProvider.hasMore()).to.be.true;
     expect(await result.hasMore()).to.be.true;
@@ -100,34 +103,61 @@
     const context = {};
 
     case1: {
      const rowSetProvider = new ResultsProviderMock();
-      const result = new ArrowResultHandler(context, rowSetProvider, sampleArrowSchema);
+      const result = new ArrowResultHandler(context, rowSetProvider, { arrowSchema: sampleArrowSchema });
       expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([]);
       expect(await result.hasMore()).to.be.false;
     }
     case2: {
       const rowSetProvider = new ResultsProviderMock([sampleRowSet2]);
-      const result = new ArrowResultHandler(context, rowSetProvider, sampleArrowSchema);
+      const result = new ArrowResultHandler(context, rowSetProvider, { arrowSchema: sampleArrowSchema });
       expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([]);
       expect(await result.hasMore()).to.be.false;
     }
     case3: {
       const rowSetProvider = new ResultsProviderMock([sampleRowSet3]);
-      const result = new ArrowResultHandler(context, rowSetProvider, sampleArrowSchema);
+      const result = new ArrowResultHandler(context, rowSetProvider, { arrowSchema: sampleArrowSchema });
       expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([]);
       expect(await result.hasMore()).to.be.false;
     }
     case4: {
       const rowSetProvider = new ResultsProviderMock([sampleRowSet4]);
-      const result = new ArrowResultHandler(context, rowSetProvider, sampleArrowSchema);
+      const result = new ArrowResultHandler(context, rowSetProvider, { arrowSchema: sampleArrowSchema });
       expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([]);
       expect(await result.hasMore()).to.be.false;
     }
   });
 
+  it('should infer arrow schema from thrift schema', async () => {
+    const context = {};
+    const rowSetProvider = new ResultsProviderMock([sampleRowSet2]);
+
+    const sampleThriftSchema = {
+      columns: [
+        {
+          columnName: '1',
+          typeDesc: {
+            types: [
+              {
+                primitiveEntry: {
+                  type: 3,
+                  typeQualifiers: null,
+                },
+              },
+            ],
+          },
+          position: 1,
+        },
+      ],
+    };
+
+    const result = new ArrowResultHandler(context, rowSetProvider, { schema: sampleThriftSchema });
+    expect(result.arrowSchema).to.not.be.undefined;
+  });
+
   it('should return empty array if no schema available', async () => {
     const context = {};
     const rowSetProvider = new ResultsProviderMock([sampleRowSet2]);
-    const result = new ArrowResultHandler(context, rowSetProvider);
+    const result = new ArrowResultHandler(context, rowSetProvider, {});
     expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([]);
     expect(await result.hasMore()).to.be.false;
diff --git a/tests/unit/result/CloudFetchResultHandler.test.js b/tests/unit/result/CloudFetchResultHandler.test.js
index 44b8475e..96597779 100644
--- a/tests/unit/result/CloudFetchResultHandler.test.js
+++ b/tests/unit/result/CloudFetchResultHandler.test.js
@@ -86,7 +86,7 @@ describe('CloudFetchResultHandler', () => {
       getConfig: () => clientConfig,
     };
 
-    const result = new CloudFetchResultHandler(context, rowSetProvider);
+    const result = new CloudFetchResultHandler(context, rowSetProvider, {});
 
     case1: {
       result.pendingLinks = [];
@@ -119,7 +119,7 @@
       getConfig: () => clientConfig,
     };
 
-    const result = new CloudFetchResultHandler(context, rowSetProvider);
+    const result = new CloudFetchResultHandler(context, rowSetProvider, {});
 
     sinon.stub(result, 'fetch').returns(
       Promise.resolve({
@@ -153,7 +153,7 @@
       getConfig: () => clientConfig,
     };
 
-    const result = new CloudFetchResultHandler(context, rowSetProvider);
+    const result = new CloudFetchResultHandler(context, rowSetProvider, {});
 
     sinon.stub(result, 'fetch').returns(
       Promise.resolve({
@@ -213,7 +213,7 @@
       getConfig: () => clientConfig,
     };
 
-    const result = new CloudFetchResultHandler(context, rowSetProvider, true);
+    const result = new CloudFetchResultHandler(context, rowSetProvider, { lz4Compressed: true });
 
     const expectedBatch = Buffer.concat([sampleArrowSchema, sampleArrowBatch]);
 
@@ -244,7 +244,7 @@
       getConfig: () => clientConfig,
     };
 
-    const result = new CloudFetchResultHandler(context, rowSetProvider);
+    const result = new CloudFetchResultHandler(context, rowSetProvider, {});
 
     sinon.stub(result, 'fetch').returns(
       Promise.resolve({
@@ -275,7 +275,7 @@
       getConfig: () => clientConfig,
     };
 
-    const result = new CloudFetchResultHandler(context, rowSetProvider);
+    const result = new CloudFetchResultHandler(context, rowSetProvider, {});
 
     sinon.stub(result, 'fetch').returns(
       Promise.resolve({
diff --git a/tests/unit/result/JsonResultHandler.test.js b/tests/unit/result/JsonResultHandler.test.js
index d6c3bf09..4c85147b 100644
--- a/tests/unit/result/JsonResultHandler.test.js
+++ b/tests/unit/result/JsonResultHandler.test.js
@@ -42,7 +42,7 @@ describe('JsonResultHandler', () => {
     const context = {};
     const rowSetProvider = new ResultsProviderMock(data);
 
-    const result = new JsonResultHandler(context, rowSetProvider, schema);
+    const result = new JsonResultHandler(context, rowSetProvider, { schema });
 
     expect(await rowSetProvider.hasMore()).to.be.true;
     expect(await result.hasMore()).to.be.true;
@@ -135,7 +135,7 @@
     const context = {};
     const rowSetProvider = new ResultsProviderMock(data);
 
-    const result = new JsonResultHandler(context, rowSetProvider, schema);
+    const result = new JsonResultHandler(context, rowSetProvider, { schema });
 
     expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([
       {
@@ -208,7 +208,7 @@
     const context = {};
     const rowSetProvider = new ResultsProviderMock(data);
 
-    const result = new JsonResultHandler(context, rowSetProvider, schema);
+    const result = new JsonResultHandler(context, rowSetProvider, { schema });
 
     expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([
       {
@@ -230,7 +230,7 @@
     const context = {};
     const rowSetProvider = new ResultsProviderMock();
 
-    const result = new JsonResultHandler(context, rowSetProvider, null);
+    const result = new JsonResultHandler(context, rowSetProvider, { schema: null });
 
     const buf = Buffer.from([0x55, 0xaa, 0xc3]);
     [
@@ -345,7 +345,7 @@
     const context = {};
     const rowSetProvider = new ResultsProviderMock(data);
 
-    const result = new JsonResultHandler(context, rowSetProvider, schema);
+    const result = new JsonResultHandler(context, rowSetProvider, { schema });
 
     expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([
       {
@@ -377,7 +377,7 @@
     const context = {};
     const rowSetProvider = new ResultsProviderMock();
 
-    const result = new JsonResultHandler(context, rowSetProvider, schema);
+    const result = new JsonResultHandler(context, rowSetProvider, { schema });
 
     expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([]);
   });
@@ -395,7 +395,7 @@
     const context = {};
     const rowSetProvider = new ResultsProviderMock(data);
 
-    const result = new JsonResultHandler(context, rowSetProvider);
+    const result = new JsonResultHandler(context, rowSetProvider, {});
 
     expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([]);
   });
@@ -431,7 +431,7 @@
     const context = {};
     const rowSetProvider = new ResultsProviderMock(data);
 
-    const result = new JsonResultHandler(context, rowSetProvider, schema);
+    const result = new JsonResultHandler(context, rowSetProvider, { schema });
 
     expect(await result.fetchNext({ limit: 10000 })).to.be.deep.eq([
       {
diff --git a/tests/unit/result/compatibility.test.js b/tests/unit/result/compatibility.test.js
index c01f6674..eb4119b5 100644
--- a/tests/unit/result/compatibility.test.js
+++ b/tests/unit/result/compatibility.test.js
@@ -2,6 +2,7 @@ const { expect } = require('chai');
 const ArrowResultHandler = require('../../../dist/result/ArrowResultHandler').default;
 const ArrowResultConverter = require('../../../dist/result/ArrowResultConverter').default;
 const JsonResultHandler = require('../../../dist/result/JsonResultHandler').default;
+const ResultSlicer = require('../../../dist/result/ResultSlicer').default;
 
 const { fixArrowResult } = require('../../fixtures/compatibility');
 const fixtureColumn = require('../../fixtures/compatibility/column');
@@ -14,7 +15,10 @@ describe('Result handlers compatibility tests', () => {
   it('colum-based data', async () => {
     const context = {};
     const rowSetProvider = new ResultsProviderMock(fixtureColumn.rowSets);
-    const result = new JsonResultHandler(context, rowSetProvider, fixtureColumn.schema);
+    const result = new ResultSlicer(
+      context,
+      new JsonResultHandler(context, rowSetProvider, { schema: fixtureColumn.schema }),
+    );
     const rows = await result.fetchNext({ limit: 10000 });
     expect(rows).to.deep.equal(fixtureColumn.expected);
   });
@@ -22,10 +26,13 @@
   it('arrow-based data without native types', async () => {
     const context = {};
     const rowSetProvider = new ResultsProviderMock(fixtureArrow.rowSets);
-    const result = new ArrowResultConverter(
+    const result = new ResultSlicer(
       context,
-      new ArrowResultHandler(context, rowSetProvider, fixtureArrow.arrowSchema),
-      fixtureArrow.schema,
+      new ArrowResultConverter(
+        context,
+        new ArrowResultHandler(context, rowSetProvider, { arrowSchema: fixtureArrow.arrowSchema }),
+        { schema: fixtureArrow.schema },
+      ),
     );
     const rows = await result.fetchNext({ limit: 10000 });
     expect(fixArrowResult(rows)).to.deep.equal(fixtureArrow.expected);
@@ -34,12 +41,30 @@
   it('arrow-based data with native types', async () => {
     const context = {};
     const rowSetProvider = new ResultsProviderMock(fixtureArrowNT.rowSets);
-    const result = new ArrowResultConverter(
+    const result = new ResultSlicer(
       context,
-      new ArrowResultHandler(context, rowSetProvider, fixtureArrowNT.arrowSchema),
-      fixtureArrowNT.schema,
+      new ArrowResultConverter(
+        context,
+        new ArrowResultHandler(context, rowSetProvider, { arrowSchema: fixtureArrowNT.arrowSchema }),
+        { schema: fixtureArrowNT.schema },
+      ),
     );
     const rows = await result.fetchNext({ limit: 10000 });
     expect(fixArrowResult(rows)).to.deep.equal(fixtureArrowNT.expected);
   });
+
+  it('should infer arrow schema from thrift schema', async () => {
+    const context = {};
+    const rowSetProvider = new ResultsProviderMock(fixtureArrow.rowSets);
+    const result = new ResultSlicer(
+      context,
+      new ArrowResultConverter(
+        context,
+        new ArrowResultHandler(context, rowSetProvider, { schema: fixtureArrow.schema }),
+        { schema: fixtureArrow.schema },
+      ),
+    );
+    const rows = await result.fetchNext({ limit: 10000 });
+    expect(fixArrowResult(rows)).to.deep.equal(fixtureArrow.expected);
+  });
 });
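The compatibility tests now wrap every pipeline in `ResultSlicer` because, after the fetch-loop change above, a bare handler's `fetchNext()` returns one source chunk at a time rather than everything up to the limit; the slicer owns the accumulation loop. Condensed, the Arrow pipeline layering looks like this (variables as in the tests above; top-level `await` assumes an ES module context):

```typescript
// rowsets -> Arrow batches (handler) -> rows (converter) -> sliced rows (slicer).
// ResultSlicer keeps calling hasMore()/fetchNext() on the converter until the
// limit is satisfied or the source is exhausted.
const pipeline = new ResultSlicer(
  context,
  new ArrowResultConverter(
    context,
    new ArrowResultHandler(context, new ResultsProviderMock(fixtureArrow.rowSets), {
      arrowSchema: fixtureArrow.arrowSchema,
    }),
    { schema: fixtureArrow.schema },
  ),
);
const rows = await pipeline.fetchNext({ limit: 10000 });
```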