diff --git a/CHANGELOG.md b/CHANGELOG.md
index 96c82ac7..ee4ea463 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,18 @@
 
 [1]: https://www.npmjs.com/package/@google-cloud/bigquery?activeTab=versions
 
+## [7.6.0](https://github.com/googleapis/nodejs-bigquery/compare/v7.5.2...v7.6.0) (2024-04-09)
+
+
+### Features
+
+* Integrate jobs.query and stateless query for faster queries ([#1337](https://github.com/googleapis/nodejs-bigquery/issues/1337)) ([74aa150](https://github.com/googleapis/nodejs-bigquery/commit/74aa1501452c36af7969bb4a46b996485d9ca91b))
+
+
+### Bug Fixes
+
+* Parsing zero value timestamp ([#1355](https://github.com/googleapis/nodejs-bigquery/issues/1355)) ([d433711](https://github.com/googleapis/nodejs-bigquery/commit/d4337118bf5708e2cb3e0036028a6d0dc5abf22f))
+
 ## [7.5.2](https://github.com/googleapis/nodejs-bigquery/compare/v7.5.1...v7.5.2) (2024-03-27)
 
 
diff --git a/package.json b/package.json
index 88e4406c..4028d0fa 100644
--- a/package.json
+++ b/package.json
@@ -1,7 +1,7 @@
 {
   "name": "@google-cloud/bigquery",
   "description": "Google BigQuery Client Library for Node.js",
-  "version": "7.5.2",
+  "version": "7.6.0",
   "license": "Apache-2.0",
   "author": "Google LLC",
   "engines": {
diff --git a/samples/package.json b/samples/package.json
index 356fa11f..40a1ddc2 100644
--- a/samples/package.json
+++ b/samples/package.json
@@ -17,7 +17,7 @@
     "fix": "gts fix"
   },
   "dependencies": {
-    "@google-cloud/bigquery": "^7.5.2",
+    "@google-cloud/bigquery": "^7.6.0",
     "@google-cloud/storage": "^7.0.0",
     "google-auth-library": "^9.6.0",
     "readline-promise": "^1.0.4",
diff --git a/src/bigquery.ts b/src/bigquery.ts
index 0cfc686a..58c0e441 100644
--- a/src/bigquery.ts
+++ b/src/bigquery.ts
@@ -45,6 +45,7 @@ import {
 } from './table';
 import {GoogleErrorBody} from '@google-cloud/common/build/src/util';
 import bigquery from './types';
+import {logger, setLogFunction} from './logger';
 
 // Third-Party Re-exports
 export {common};
@@ -164,6 +165,9 @@ export type GetJobsCallback = PagedCallback<
   bigquery.IJobList
 >;
 
+export type JobsQueryResponse = [Job, bigquery.IQueryResponse];
+export type JobsQueryCallback = ResourceCallback<Job, bigquery.IQueryResponse>;
+
 export interface BigQueryTimeOptions {
   hours?: number | string;
   minutes?: number | string;
@@ -194,6 +198,7 @@ export interface ProvidedTypeStruct {
 }
 
 export type QueryParameter = bigquery.IQueryParameter;
+export type ParameterMode = bigquery.IJobConfigurationQuery['parameterMode'];
 
 export interface BigQueryOptions extends GoogleAuthOptions {
   /**
@@ -282,6 +287,12 @@ export const PROTOCOL_REGEX = /^(\w*):\/\//;
  * We will create a table with the correct schema, import the public CSV file
  * into that table, and query it for data.
  *
+ * This client supports enabling query-related preview features via environmental
+ * variables. By setting the environment variable QUERY_PREVIEW_ENABLED to the string
+ * "TRUE", the client will enable preview features, though behavior may still be
+ * controlled via the bigquery service as well. Currently, the feature(s) in scope
+ * include: stateless queries (query execution without corresponding job metadata).
+ *
  * @class
  *
  * See {@link https://cloud.google.com/bigquery/what-is-bigquery| What is BigQuery?}
@@ -318,6 +329,7 @@ export const PROTOCOL_REGEX = /^(\w*):\/\//;
 export class BigQuery extends Service {
   location?: string;
   private _universeDomain: string;
+  private _enableQueryPreview: boolean;
 
   createQueryStream(options?: Query | string): ResourceStream<RowMetadata> {
     // placeholder body, overwritten in constructor
@@ -375,6 +387,14 @@ export class BigQuery extends Service {
 
     super(config, options);
 
+    const QUERY_PREVIEW_ENABLED = process.env.QUERY_PREVIEW_ENABLED;
+    this._enableQueryPreview = false;
+    if (typeof QUERY_PREVIEW_ENABLED === 'string') {
+      if (QUERY_PREVIEW_ENABLED.toUpperCase() === 'TRUE') {
+        this._enableQueryPreview = true;
+      }
+    }
+
     this._universeDomain = universeDomain;
     this.location = options.location;
     /**
@@ -488,6 +508,11 @@ export class BigQuery extends Service {
     });
   }
 
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
+  private trace_(msg: string, ...otherArgs: any[]) {
+    logger('[bigquery]', msg, ...otherArgs);
+  }
+
   get universeDomain() {
     return this._universeDomain;
   }
@@ -640,7 +665,8 @@ export class BigQuery extends Service {
           break;
         }
         case 'TIMESTAMP': {
-          const pd = new PreciseDate(BigInt(value) * BigInt(1000));
+          const pd = new PreciseDate();
+          pd.setFullTime(PreciseDate.parseFull(BigInt(value) * BigInt(1000)));
           value = BigQuery.timestamp(pd);
           break;
         }
@@ -1435,18 +1461,19 @@ export class BigQuery extends Service {
     callback?: JobCallback
   ): void | Promise<JobResponse> {
     const options = typeof opts === 'object' ? opts : {query: opts};
+    this.trace_('[createQueryJob]', options, callback);
     if ((!options || !options.query) && !options.pageToken) {
       throw new Error('A SQL query string is required.');
     }
 
-    // eslint-disable-next-line @typescript-eslint/no-explicit-any
-    const query: any = extend(
+    const query: Query = extend(
       true,
       {
         useLegacySql: false,
       },
       options
     );
+    this.trace_('[createQueryJob]', query);
 
     if (options.destination) {
       if (!(options.destination instanceof Table)) {
@@ -1462,78 +1489,21 @@ export class BigQuery extends Service {
       delete query.destination;
     }
 
-    if (query.params) {
-      query.parameterMode = is.array(query.params) ? 'positional' : 'named';
-
-      if (query.parameterMode === 'named') {
-        query.queryParameters = [];
-
-        // tslint:disable-next-line forin
-        for (const namedParameter in query.params) {
-          const value = query.params[namedParameter];
-          let queryParameter;
-
-          if (query.types) {
-            if (!is.object(query.types)) {
-              throw new Error(
-                'Provided types must match the value type passed to `params`'
-              );
-            }
-
-            if (query.types[namedParameter]) {
-              queryParameter = BigQuery.valueToQueryParameter_(
-                value,
-                query.types[namedParameter]
-              );
-            } else {
-              queryParameter = BigQuery.valueToQueryParameter_(value);
-            }
-          } else {
-            queryParameter = BigQuery.valueToQueryParameter_(value);
-          }
-
-          queryParameter.name = namedParameter;
-          query.queryParameters.push(queryParameter);
-        }
-      } else {
-        query.queryParameters = [];
-
-        if (query.types) {
-          if (!is.array(query.types)) {
-            throw new Error(
-              'Provided types must match the value type passed to `params`'
-            );
-          }
-
-          if (query.params.length !== query.types.length) {
-            throw new Error('Incorrect number of parameter types provided.');
-          }
-          query.params.forEach((value: {}, i: number) => {
-            const queryParameter = BigQuery.valueToQueryParameter_(
-              value,
-              query.types[i]
-            );
-            query.queryParameters.push(queryParameter);
-          });
-        } else {
-          query.params.forEach((value: {}) => {
-            const queryParameter = BigQuery.valueToQueryParameter_(value);
-            query.queryParameters.push(queryParameter);
-          });
-        }
-      }
-      delete query.params;
-    }
+    const {parameterMode, params} = this.buildQueryParams_(
+      query.params,
+      query.types
+    );
+    query.parameterMode = parameterMode;
+    query.queryParameters = params;
+    delete query.params;
 
-    // eslint-disable-next-line @typescript-eslint/no-explicit-any
-    const reqOpts: any = {
-      configuration: {
-        query,
-      },
+    const reqOpts: JobOptions = {};
+    reqOpts.configuration = {
+      query,
     };
 
     if (typeof query.jobTimeoutMs === 'number') {
-      reqOpts.configuration.jobTimeoutMs = query.jobTimeoutMs;
+      reqOpts.configuration.jobTimeoutMs = query.jobTimeoutMs.toString();
       delete query.jobTimeoutMs;
     }
 
@@ -1565,6 +1535,85 @@ export class BigQuery extends Service {
     this.createJob(reqOpts, callback!);
   }
 
+  private buildQueryParams_(
+    params: Query['params'],
+    types: Query['types']
+  ): {
+    parameterMode: ParameterMode;
+    params: bigquery.IQueryParameter[] | undefined;
+  } {
+    if (!params) {
+      return {
+        parameterMode: undefined,
+        params: undefined,
+      };
+    }
+    const parameterMode = is.array(params) ? 'positional' : 'named';
+    const queryParameters: bigquery.IQueryParameter[] = [];
+    if (parameterMode === 'named') {
+      const namedParams = params as {[param: string]: any};
+      for (const namedParameter of Object.getOwnPropertyNames(namedParams)) {
+        const value = namedParams[namedParameter];
+        let queryParameter;
+
+        if (types) {
+          if (!is.object(types)) {
+            throw new Error(
+              'Provided types must match the value type passed to `params`'
+            );
+          }
+
+          const namedTypes = types as QueryParamTypeStruct;
+
+          if (namedTypes[namedParameter]) {
+            queryParameter = BigQuery.valueToQueryParameter_(
+              value,
+              namedTypes[namedParameter]
+            );
+          } else {
+            queryParameter = BigQuery.valueToQueryParameter_(value);
+          }
+        } else {
+          queryParameter = BigQuery.valueToQueryParameter_(value);
+        }
+
+        queryParameter.name = namedParameter;
+        queryParameters.push(queryParameter);
+      }
+    } else {
+      if (types) {
+        if (!is.array(types)) {
+          throw new Error(
+            'Provided types must match the value type passed to `params`'
+          );
+        }
+
+        const positionalTypes = types as QueryParamTypeStruct[];
+
+        if (params.length !== types.length) {
+          throw new Error('Incorrect number of parameter types provided.');
+        }
+        params.forEach((value: {}, i: number) => {
+          const queryParameter = BigQuery.valueToQueryParameter_(
+            value,
+            positionalTypes[i]
+          );
+          queryParameters.push(queryParameter);
+        });
+      } else {
+        params.forEach((value: {}) => {
+          const queryParameter = BigQuery.valueToQueryParameter_(value);
+          queryParameters.push(queryParameter);
+        });
+      }
+    }
+
+    return {
+      parameterMode,
+      params: queryParameters,
+    };
+  }
+
   /**
    * Creates a job. Typically when creating a job you'll have a very specific
    * task in mind. For this we recommend one of the following methods:
@@ -2114,22 +2163,178 @@ export class BigQuery extends Service {
         : {};
     const callback =
       typeof optionsOrCallback === 'function' ? optionsOrCallback : cb;
-    this.createQueryJob(query, (err, job, resp) => {
+
+    this.trace_('[query]', query, options);
+    const queryReq = this.buildQueryRequest_(query, options);
+    this.trace_('[query] queryReq', queryReq);
+    if (!queryReq) {
+      this.createQueryJob(query, (err, job, resp) => {
+        if (err) {
+          (callback as SimpleQueryRowsCallback)(err, null, resp);
+          return;
+        }
+        if (typeof query === 'object' && query.dryRun) {
+          (callback as SimpleQueryRowsCallback)(null, [], resp);
+          return;
+        }
+        // The Job is important for the `queryAsStream_` method, so a new query
+        // isn't created each time results are polled for.
+        options = extend({job}, queryOpts, options);
+        job!.getQueryResults(options, callback as QueryRowsCallback);
+      });
+      return;
+    }
+
+    this.runJobsQuery(queryReq, (err, job, res) => {
+      this.trace_('[runJobsQuery callback]: ', query, err, job, res);
       if (err) {
-        (callback as SimpleQueryRowsCallback)(err, null, resp);
+        (callback as SimpleQueryRowsCallback)(err, null, res);
         return;
       }
-      if (typeof query === 'object' && query.dryRun) {
-        (callback as SimpleQueryRowsCallback)(null, [], resp);
+
+      options = extend({job}, queryOpts, options);
+      if (res && res.jobComplete) {
+        let rows: any = [];
+        if (res.schema && res.rows) {
+          rows = BigQuery.mergeSchemaWithRows_(res.schema, res.rows, {
+            wrapIntegers: options.wrapIntegers || false,
+            parseJSON: options.parseJSON,
+          });
+        }
+        this.trace_('[runJobsQuery] job complete');
+        options._cachedRows = rows;
+        if (res.pageToken) {
+          this.trace_('[runJobsQuery] has more pages');
+          options.pageToken = res.pageToken;
+        } else {
+          this.trace_('[runJobsQuery] no more pages');
+        }
+        job!.getQueryResults(options, callback as QueryRowsCallback);
         return;
       }
-      // The Job is important for the `queryAsStream_` method, so a new query
-      // isn't created each time results are polled for.
-      options = extend({job}, queryOpts, options);
+      delete options.timeoutMs;
+      this.trace_('[runJobsQuery] job not complete');
       job!.getQueryResults(options, callback as QueryRowsCallback);
     });
   }
 
+  /**
+   * Check if the given Query can run using the `jobs.query` endpoint.
+   * Returns a bigquery.IQueryRequest that can be used to call `jobs.query`.
+   * Return undefined if is not possible to convert to a bigquery.IQueryRequest.
+   *
+   * @param query string | Query
+   * @param options QueryOptions
+   * @returns bigquery.IQueryRequest | undefined
+   */
+  private buildQueryRequest_(
+    query: string | Query,
+    options: QueryOptions
+  ): bigquery.IQueryRequest | undefined {
+    if (process.env.FAST_QUERY_PATH === 'DISABLED') {
+      return undefined;
+    }
+    const queryObj: Query =
+      typeof query === 'string'
+        ? {
+            query: query,
+          }
+        : query;
+    this.trace_('[buildQueryRequest]', query, options, queryObj);
+    // This is a denylist of settings which prevent us from composing an equivalent
+    // bq.QueryRequest due to differences between configuration parameters accepted
+    // by jobs.insert vs jobs.query.
+    if (
+      !!queryObj.destination ||
+      !!queryObj.tableDefinitions ||
+      !!queryObj.createDisposition ||
+      !!queryObj.writeDisposition ||
+      (!!queryObj.priority && queryObj.priority !== 'INTERACTIVE') ||
+      queryObj.useLegacySql ||
+      !!queryObj.maximumBillingTier ||
+      !!queryObj.timePartitioning ||
+      !!queryObj.rangePartitioning ||
+      !!queryObj.clustering ||
+      !!queryObj.destinationEncryptionConfiguration ||
+      !!queryObj.schemaUpdateOptions ||
+      !!queryObj.jobTimeoutMs ||
+      // User has defined the jobID generation behavior
+      !!queryObj.jobId
+    ) {
+      return undefined;
+    }
+
+    if (queryObj.dryRun) {
+      return undefined;
+    }
+
+    if (options.job) {
+      return undefined;
+    }
+    const req: bigquery.IQueryRequest = {
+      useQueryCache: queryObj.useQueryCache,
+      labels: queryObj.labels,
+      defaultDataset: queryObj.defaultDataset,
+      createSession: queryObj.createSession,
+      maximumBytesBilled: queryObj.maximumBytesBilled,
+      timeoutMs: options.timeoutMs,
+      location: queryObj.location || options.location,
+      formatOptions: {
+        useInt64Timestamp: true,
+      },
+      maxResults: queryObj.maxResults || options.maxResults,
+      query: queryObj.query,
+      useLegacySql: false,
+      requestId: uuid.v4(),
+      jobCreationMode: 'JOB_CREATION_OPTIONAL',
+    };
+    if (!this._enableQueryPreview) {
+      delete req.jobCreationMode;
+    }
+    const {parameterMode, params} = this.buildQueryParams_(
+      queryObj.params,
+      queryObj.types
+    );
+    if (params) {
+      req.queryParameters = params;
+    }
+    if (parameterMode) {
+      req.parameterMode = parameterMode;
+    }
+    return req;
+  }
+
+  private runJobsQuery(
+    req: bigquery.IQueryRequest,
+    callback?: JobsQueryCallback
+  ): void | Promise<JobsQueryResponse> {
+    this.trace_('[runJobsQuery]', req, callback);
+    this.request(
+      {
+        method: 'POST',
+        uri: '/queries',
+        json: req,
+      },
+      async (err, res: bigquery.IQueryResponse) => {
+        this.trace_('jobs.query res:', res, err);
+        if (err) {
+          callback!(err, null, res);
+          return;
+        }
+        let job: Job | null = null;
+        if (res.jobReference) {
+          const jobRef = res.jobReference;
+          job = this.job(jobRef.jobId!, {
+            location: jobRef.location,
+          });
+        } else if (res.queryId) {
+          job = this.job(res.queryId); // stateless query
+        }
+        callback!(null, job, res);
+      }
+    );
+  }
+
   /**
    * This method will be called by `createQueryStream()`. It is required to
    * properly set the `autoPaginate` option value.
@@ -2161,6 +2366,8 @@ export class BigQuery extends Service {
 
     this.query(query, opts, callback);
   }
+
+  static setLogFunction = setLogFunction;
 }
 
 /*! Developer Documentation
diff --git a/src/job.ts b/src/job.ts
index 5da91920..a950d1ed 100644
--- a/src/job.ts
+++ b/src/job.ts
@@ -39,6 +39,7 @@ import {
 } from './bigquery';
 import {RowMetadata} from './table';
 import bigquery from './types';
+import {logger} from './logger';
 
 export type JobMetadata = bigquery.IJob;
 export type JobOptions = JobRequest<JobMetadata>;
@@ -50,7 +51,12 @@ export type QueryResultsOptions = {
   job?: Job;
   wrapIntegers?: boolean | IntegerTypeCastOptions;
   parseJSON?: boolean;
-} & PagedRequest<bigquery.jobs.IGetQueryResultsParams>;
+} & PagedRequest<bigquery.jobs.IGetQueryResultsParams> & {
+    /**
+     * internal properties
+     */
+    _cachedRows?: any[];
+  };
 
 /**
  * @callback QueryResultsCallback
@@ -379,6 +385,11 @@ class Job extends Operation<JobMetadata> {
     );
   }
 
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
+  private trace_(msg: string, ...otherArgs: any[]) {
+    logger(`[job][${this.id}]`, msg, ...otherArgs);
+  }
+
   /**
    * @callback CancelCallback
    * @param {?Error} err Request error, if any.
@@ -536,6 +547,12 @@ class Job extends Operation<JobMetadata> {
       },
       options
     );
+    this.trace_(
+      '[getQueryResults]',
+      this.id,
+      options.pageToken,
+      options.startIndex
+    );
 
     const wrapIntegers = qs.wrapIntegers ? qs.wrapIntegers : false;
     delete qs.wrapIntegers;
@@ -547,6 +564,18 @@ class Job extends Operation<JobMetadata> {
     const timeoutOverride =
       typeof qs.timeoutMs === 'number' ? qs.timeoutMs : false;
 
+    if (options._cachedRows) {
+      let nextQuery: QueryResultsOptions | null = null;
+      if (options.pageToken) {
+        nextQuery = Object.assign({}, options, {
+          pageToken: options.pageToken,
+        });
+        delete nextQuery._cachedRows;
+      }
+      callback!(null, options._cachedRows, nextQuery);
+      return;
+    }
+
     this.bigQuery.request(
       {
         uri: '/queries/' + this.id,
@@ -582,6 +611,7 @@ class Job extends Operation<JobMetadata> {
             return;
           }
         } else if (resp.pageToken) {
+          this.trace_('[getQueryResults] has more pages', resp.pageToken);
           // More results exist.
           nextQuery = Object.assign({}, options, {
             pageToken: resp.pageToken,
diff --git a/src/logger.ts b/src/logger.ts
new file mode 100644
index 00000000..d8dfc017
--- /dev/null
+++ b/src/logger.ts
@@ -0,0 +1,47 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import * as util from 'util';
+
+/*! The external function used to emit logs. */
+let logFunction: ((msg: string) => void) | null = null;
+
+/**
+ * Log function to use for debug output. By default, we don't perform any
+ * logging.
+ *
+ * @private
+ * @internal
+ */
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+export function logger(source: string, msg: string, ...otherArgs: any[]) {
+  if (logFunction) {
+    const time = new Date().toISOString();
+    const formattedMsg = util.format(
+      `D ${time} | ${source} | ${msg} |`,
+      ...otherArgs
+    );
+    logFunction(formattedMsg);
+  }
+}
+
+/**
+ * Sets or disables the log function for all active BigQuery instances.
+ *
+ * @param logger A log function that takes a message (such as `console.log`) or
+ *   `null` to turn off logging.
+ */
+export function setLogFunction(logger: ((msg: string) => void) | null): void {
+  logFunction = logger;
+}
diff --git a/src/types.d.ts b/src/types.d.ts
index ae22e688..ffedd554 100644
--- a/src/types.d.ts
+++ b/src/types.d.ts
@@ -1,4 +1,4 @@
-// Copyright 2023 Google LLC
+// Copyright 2024 Google LLC
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -931,7 +931,7 @@ declare namespace bigquery {
      */
     etag?: string;
     /**
-     * Optional. Information about the external metadata storage where the dataset is defined. Filled out when the dataset type is EXTERNAL.
+     * Optional. Reference to a read-only external dataset defined in data catalogs outside of BigQuery. Filled out when the dataset type is EXTERNAL.
      */
     externalDatasetReference?: IExternalDatasetReference;
     /**
@@ -970,6 +970,10 @@ declare namespace bigquery {
      * Optional. Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days). The default value is 168 hours if this is not set.
      */
     maxTimeTravelHours?: string;
+    /**
+     * Output only. Reserved for future use.
+     */
+    satisfiesPzi?: boolean;
     /**
      * Output only. Reserved for future use.
      */
@@ -1927,8 +1931,8 @@ declare namespace bigquery {
       | 'ESTIMATED_PERFORMANCE_GAIN_TOO_LOW'
       | 'NOT_SUPPORTED_IN_STANDARD_EDITION'
       | 'INDEX_SUPPRESSED_BY_FUNCTION_OPTION'
-      | 'INTERNAL_ERROR'
       | 'QUERY_CACHE_HIT'
+      | 'INTERNAL_ERROR'
       | 'OTHER_REASON';
     /**
      * Specifies the name of the unused search index, if available.
@@ -2212,6 +2216,10 @@ declare namespace bigquery {
      * Optional. Connection properties which can modify the load job behavior. Currently, only the 'session_id' connection property is supported, and is used to resolve _SESSION appearing as the dataset id.
      */
     connectionProperties?: Array<IConnectionProperty>;
+    /**
+     * Optional. [Experimental] Configures the load job to only copy files to the destination BigLake managed table with an external storage_uri, without reading file content and writing them to new files. Copying files only is supported when: * source_uris are in the same external storage system as the destination table but they do not overlap with storage_uri of the destination table. * source_format is the same file format as the destination table. * destination_table is an existing BigLake managed table. Its schema does not have default value expression. It schema does not have type parameters other than precision and scale. * No options other than the above are specified.
+     */
+    copyFilesOnly?: boolean;
     /**
      * Optional. Specifies whether the job is allowed to create new tables. The following values are supported: * CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table. * CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result. The default value is CREATE_IF_NEEDED. Creation, truncation and append actions occur as one atomic update upon job completion.
      */
@@ -2880,7 +2888,7 @@ declare namespace bigquery {
        */
      undeclaredQueryParameters?: Array<IQueryParameter>;
      /**
-      * Output only. Search query specific statistics.
+      * Output only. Vector Search query specific statistics.
       */
      vectorSearchStatistics?: IVectorSearchStatistics;
    };
diff --git a/test/bigquery.ts b/test/bigquery.ts
index 8efa158d..ebebb722 100644
--- a/test/bigquery.ts
+++ b/test/bigquery.ts
@@ -40,6 +40,8 @@ import {
   Table,
   JobOptions,
   TableField,
+  Query,
+  QueryResultsOptions,
 } from '../src';
 import {SinonStub} from 'sinon';
 import {PreciseDate} from '@google-cloud/precise-date';
@@ -187,6 +189,7 @@ describe('BigQuery', () => {
     Object.assign(fakeUtil, originalFakeUtil);
     BigQuery = Object.assign(BigQuery, BigQueryCached);
     bq = new BigQuery({projectId: PROJECT_ID});
+    bq._enableQueryPreview = true;
   });
 
   after(() => {
@@ -636,6 +639,38 @@ describe('BigQuery', () => {
       });
     });
 
+    it('should parse uint64 timestamps with nanosecond precision', () => {
+      const SCHEMA_OBJECT = {
+        fields: [{name: 'ts', type: 'TIMESTAMP'}],
+      } as {fields: TableField[]};
+
+      sandbox.restore(); // restore BigQuery.timestamp call
+
+      const rows = {
+        raw: [
+          {f: [{v: '-604800000000'}]}, // negative value
+          {f: [{v: '0'}]}, // 0 value
+          {f: [{v: '1000000'}]}, // 1 sec after epoch
+          {f: [{v: '1712609904434123'}]}, // recent time
+        ],
+        expectedParsed: [
+          {ts: BigQuery.timestamp('1969-12-25T00:00:00.000Z')},
+          {ts: BigQuery.timestamp('1970-01-01T00:00:00Z')},
+          {ts: BigQuery.timestamp('1970-01-01T00:00:01Z')},
+          {ts: BigQuery.timestamp('2024-04-08T20:58:24.434123Z')},
+        ],
+      };
+
+      const mergedRows = BigQuery.mergeSchemaWithRows_(
+        SCHEMA_OBJECT,
+        rows.raw,
+        {}
+      );
+      mergedRows.forEach((mergedRow: {}, i: number) => {
+        assert.deepStrictEqual(mergedRow, rows.expectedParsed[i]);
+      });
+    });
+
     it('should wrap integers with option', () => {
       const wrapIntegersBoolean = true;
       const wrapIntegersObject = {};
@@ -2105,10 +2140,10 @@ describe('BigQuery', () => {
       it('should set the correct query parameters', done => {
         const queryParameter = {};
 
-        BigQuery.valueToQueryParameter_ = (value: {}) => {
+        sandbox.replace(BigQuery, 'valueToQueryParameter_', (value: {}) => {
          assert.strictEqual(value, NAMED_PARAMS.key);
          return queryParameter;
-        };
+        });
 
        bq.createJob = (reqOpts: JobOptions) => {
          const query = reqOpts.configuration!.query!;
@@ -2129,14 +2164,15 @@ describe('BigQuery', () => {
       it('should allow for optional parameter types', () => {
         const queryParameter = {};
 
-        BigQuery.valueToQueryParameter_ = (
-          value: {},
-          providedType: string
-        ) => {
-          assert.strictEqual(value, NAMED_PARAMS.key);
-          assert.strictEqual(providedType, NAMED_TYPES.key);
-          return queryParameter;
-        };
+        sandbox.replace(
+          BigQuery,
+          'valueToQueryParameter_',
+          (value: {}, providedType: string) => {
+            assert.strictEqual(value, NAMED_PARAMS.key);
+            assert.strictEqual(providedType, NAMED_TYPES.key);
+            return queryParameter;
+          }
+        );
 
        bq.createJob = (reqOpts: JobOptions) => {
          // eslint-disable-next-line @typescript-eslint/no-explicit-any
          assert.strictEqual((reqOpts as any).params, undefined);
@@ -2155,10 +2191,10 @@ describe('BigQuery', () => {
       it('should allow for providing only some parameter types', () => {
         const queryParameter = {};
 
-        BigQuery.valueToQueryParameter_ = (value: {}) => {
+        sandbox.replace(BigQuery, 'valueToQueryParameter_', (value: {}) => {
          assert.strictEqual(value, NAMED_PARAMS.key);
          return queryParameter;
-        };
+        });
 
        bq.createJob = (reqOpts: JobOptions) => {
          // eslint-disable-next-line @typescript-eslint/no-explicit-any
@@ -2190,9 +2226,9 @@ describe('BigQuery', () => {
       it('should set the correct parameter mode', done => {
         const queryParameter = {};
 
-        BigQuery.valueToQueryParameter_ = () => {
+        sandbox.replace(BigQuery, 'valueToQueryParameter_', (value: {}) => {
          return queryParameter;
-        };
+        });
 
        bq.createJob = (reqOpts: JobOptions) => {
          const query = reqOpts.configuration!.query!;
@@ -2212,10 +2248,10 @@ describe('BigQuery', () => {
       it('should set the correct query parameters', done => {
         const queryParameter = {};
 
-        BigQuery.valueToQueryParameter_ = (value: {}) => {
+        sandbox.replace(BigQuery, 'valueToQueryParameter_', (value: {}) => {
          assert.strictEqual(value, POSITIONAL_PARAMS[0]);
          return queryParameter;
-        };
+        });
 
        bq.createJob = (reqOpts: JobOptions) => {
          const query = reqOpts.configuration!.query!;
@@ -2405,7 +2441,7 @@ describe('BigQuery', () => {
         );
         assert.strictEqual(
           reqOpts.configuration!.jobTimeoutMs,
-          options.jobTimeoutMs
+          `${options.jobTimeoutMs}`
         );
         done();
       };
@@ -2800,6 +2836,25 @@ describe('BigQuery', () => {
         callback(error, null, FAKE_RESPONSE);
       };
 
+      bq.buildQueryRequest_ = (query: {}, options: {}) => {
+        return undefined;
+      };
+
+      bq.query(QUERY_STRING, (err: Error, rows: {}, resp: {}) => {
+        assert.strictEqual(err, error);
+        assert.strictEqual(rows, null);
+        assert.strictEqual(resp, FAKE_RESPONSE);
+        done();
+      });
+    });
+
+    it('should return any errors from jobs.query', done => {
+      const error = new Error('err');
+
+      bq.runJobsQuery = (query: {}, callback: Function) => {
+        callback(error, null, FAKE_RESPONSE);
+      };
+
       bq.query(QUERY_STRING, (err: Error, rows: {}, resp: {}) => {
         assert.strictEqual(err, error);
         assert.strictEqual(rows, null);
         assert.strictEqual(resp, FAKE_RESPONSE);
         done();
       });
     });
@@ -2819,6 +2874,10 @@ describe('BigQuery', () => {
         callback(null, null, FAKE_RESPONSE);
       };
 
+      bq.buildQueryRequest_ = (query: {}, options: {}) => {
+        return undefined;
+      };
+
       bq.query(options, (err: Error, rows: {}, resp: {}) => {
         assert.ifError(err);
         assert.deepStrictEqual(rows, []);
@@ -2838,6 +2897,10 @@ describe('BigQuery', () => {
         callback(null, fakeJob, FAKE_RESPONSE);
       };
 
+      bq.buildQueryRequest_ = (query: {}, options: {}) => {
+        return undefined;
+      };
+
       bq.query(QUERY_STRING, (err: Error, rows: {}, resp: {}) => {
         assert.ifError(err);
         assert.strictEqual(rows, FAKE_ROWS);
@@ -2846,6 +2909,41 @@ describe('BigQuery', () => {
       });
     });
 
+    it('should call job#getQueryResults with cached rows from jobs.query', done => {
+      const fakeJob = {
+        getQueryResults: (options: QueryResultsOptions, callback: Function) => {
+          callback(null, options._cachedRows, FAKE_RESPONSE);
+        },
+      };
+
+      bq.runJobsQuery = (query: {}, callback: Function) => {
+        callback(null, fakeJob, {
+          jobComplete: true,
+          schema: {
+            fields: [{name: 'value', type: 'INT64'}],
+          },
+          rows: [{f: [{v: 1}]}, {f: [{v: 2}]}, {f: [{v: 3}]}],
+        });
+      };
+
+      bq.query(QUERY_STRING, (err: Error, rows: {}, resp: {}) => {
+        assert.ifError(err);
+        assert.deepStrictEqual(rows, [
+          {
+            value: 1,
+          },
+          {
+            value: 2,
+          },
+          {
+            value: 3,
+          },
+        ]);
+        assert.strictEqual(resp, FAKE_RESPONSE);
+        done();
+      });
+    });
+
     it('should call job#getQueryResults with query options', done => {
       let queryResultsOpts = {};
       const fakeJob = {
@@ -2859,6 +2957,10 @@ describe('BigQuery', () => {
         callback(null, fakeJob, FAKE_RESPONSE);
       };
 
+      bq.buildQueryRequest_ = (query: {}, options: {}) => {
+        return undefined;
+      };
+
       const query = {
         query: QUERY_STRING,
         wrapIntegers: true,
@@ -2889,6 +2991,10 @@ describe('BigQuery', () => {
         callback(null, fakeJob, FAKE_RESPONSE);
       };
 
+      bq.buildQueryRequest_ = (query: {}, opts: {}) => {
+        return undefined;
+      };
+
       bq.query(QUERY_STRING, assert.ifError);
     });
 
@@ -2906,10 +3012,151 @@ describe('BigQuery', () => {
         callback(null, fakeJob, FAKE_RESPONSE);
       };
 
+      bq.buildQueryRequest_ = (query: {}, opts: {}) => {
+        return undefined;
+      };
+
       bq.query(QUERY_STRING, fakeOptions, assert.ifError);
     });
   });
 
+  describe('buildQueryRequest_', () => {
+    const DATASET_ID = 'dataset-id';
+    const TABLE_ID = 'table-id';
+    const QUERY_STRING = 'SELECT * FROM [dataset.table]';
+
+    it('should create a QueryRequest from a Query interface', () => {
+      const q: Query = {
+        query: QUERY_STRING,
+        maxResults: 10,
+        defaultDataset: {
+          projectId: PROJECT_ID,
+          datasetId: DATASET_ID,
+        },
+        priority: 'INTERACTIVE',
+        params: {
+          key: 'value',
+        },
+        maximumBytesBilled: '1024',
+        labels: {
+          key: 'value',
+        },
+      };
+      const req = bq.buildQueryRequest_(q, {});
+      for (const key in req) {
+        if (req[key] === undefined) {
+          delete req[key];
+        }
+      }
+      const expectedReq = {
+        query: QUERY_STRING,
+        useLegacySql: false,
+        requestId: req.requestId,
+        maxResults: 10,
+        defaultDataset: {
+          projectId: PROJECT_ID,
+          datasetId: DATASET_ID,
+        },
+        parameterMode: 'named',
+        queryParameters: [
+          {
+            name: 'key',
+            parameterType: {
+              type: 'STRING',
+            },
+            parameterValue: {
+              value: 'value',
+            },
+          },
+        ],
+        maximumBytesBilled: '1024',
+        labels: {
+          key: 'value',
+        },
+        jobCreationMode: 'JOB_CREATION_OPTIONAL',
+        formatOptions: {
+          useInt64Timestamp: true,
+        },
+      };
+      assert.deepStrictEqual(req, expectedReq);
+    });
+
+    it('should create a QueryRequest from a SQL string', () => {
+      const req = bq.buildQueryRequest_(QUERY_STRING, {});
+      for (const key in req) {
+        if (req[key] === undefined) {
+          delete req[key];
+        }
+      }
+      const expectedReq = {
+        query: QUERY_STRING,
+        useLegacySql: false,
+        requestId: req.requestId,
+        jobCreationMode: 'JOB_CREATION_OPTIONAL',
+        formatOptions: {
+          useInt64Timestamp: true,
+        },
+      };
+      assert.deepStrictEqual(req, expectedReq);
+    });
+
+    it('should not create a QueryRequest when config is not accepted by jobs.query', () => {
+      const dataset: any = {
+        bigQuery: bq,
+        id: 'dataset-id',
+        createTable: util.noop,
+      };
+      const table = new FakeTable(dataset, TABLE_ID);
+      const testCases: Query[] = [
+        {
+          query: QUERY_STRING,
+          dryRun: true,
+        },
+        {
+          query: QUERY_STRING,
+          destination: table,
+        },
+        {
+          query: QUERY_STRING,
+          clustering: {
+            fields: ['date'],
+          },
+        },
+        {
+          query: QUERY_STRING,
+          clustering: {},
+        },
+        {
+          query: QUERY_STRING,
+          timePartitioning: {},
+        },
+        {
+          query: QUERY_STRING,
+          rangePartitioning: {},
+        },
+        {
+          query: QUERY_STRING,
+          jobId: 'fixed-job-id',
+        },
+        {
+          query: QUERY_STRING,
+          createDisposition: 'CREATED_IF_NEEDED',
+          writeDisposition: 'WRITE_APPEND',
+        },
+        {
+          query: QUERY_STRING,
+          schemaUpdateOptions: ['update'],
+        },
+      ];
+
+      for (const index in testCases) {
+        const testCase = testCases[index];
+        const req = bq.buildQueryRequest_(testCase, {});
+        assert.equal(req, undefined);
+      }
+    });
+  });
+
   describe('queryAsStream_', () => {
     let queryStub: SinonStub;
     const defaultOpts = {