diff --git a/.gitignore b/.gitignore
index 604f64ee0..126f263f1 100644
--- a/.gitignore
+++ b/.gitignore
@@ -13,3 +13,4 @@ system-test/*key.json
 package-lock.json
 __pycache__
 .idea
+.vscode
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 57cde8d6c..a056fc1bb 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,18 @@
 
 [1]: https://www.npmjs.com/package/nodejs-spanner?activeTab=versions
 
+## [7.5.0](https://github.com/googleapis/nodejs-spanner/compare/v7.4.0...v7.5.0) (2024-03-04)
+
+
+### Features
+
+* **spanner:** Add emulator support for the admin client autogenerated API samples ([#1994](https://github.com/googleapis/nodejs-spanner/issues/1994)) ([e2fe5b7](https://github.com/googleapis/nodejs-spanner/commit/e2fe5b748c3077078fa43e4bfa427fef603656a9))
+
+
+### Bug Fixes
+
+* Revert untyped param type feature ([#2012](https://github.com/googleapis/nodejs-spanner/issues/2012)) ([49fa60d](https://github.com/googleapis/nodejs-spanner/commit/49fa60dd0735fe66db33f7b9137dba0821eb5184))
+
 ## [7.4.0](https://github.com/googleapis/nodejs-spanner/compare/v7.3.0...v7.4.0) (2024-02-23)
 
 
diff --git a/package.json b/package.json
index 51102ffaf..0119df72f 100644
--- a/package.json
+++ b/package.json
@@ -1,7 +1,7 @@
 {
   "name": "@google-cloud/spanner",
   "description": "Cloud Spanner Client Library for Node.js",
-  "version": "7.4.0",
+  "version": "7.5.0",
   "license": "Apache-2.0",
   "author": "Google Inc.",
   "engines": {
@@ -99,7 +99,7 @@
     "concat-stream": "^2.0.0",
     "dedent": "^1.0.0",
     "execa": "^5.0.0",
-    "gapic-tools": "^0.3.0",
+    "gapic-tools": "^0.4.0",
     "gts": "^5.0.0",
     "jsdoc": "^4.0.0",
     "jsdoc-fresh": "^3.0.0",
diff --git a/samples/README.md b/samples/README.md
index f4097374e..f04db33ce 100644
--- a/samples/README.md
+++ b/samples/README.md
@@ -663,7 +663,7 @@ View the [source code](https://github.com/googleapis/nodejs-spanner/blob/main/sa
 
 __Usage:__
 
-`node get-instance-config.js `
+`node get-instance-config.js `
 
 
 -----
diff --git a/samples/add-and-drop-new-database-role.js b/samples/add-and-drop-new-database-role.js
index 7423430af..100c2c55b 100644
--- a/samples/add-and-drop-new-database-role.js
+++ b/samples/add-and-drop-new-database-role.js
@@ -1,4 +1,4 @@
-// Copyright 2022 Google LLC
+// Copyright 2024 Google LLC
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -30,19 +30,18 @@ function main(
   // const instanceId = 'my-instance';
   // const databaseId = 'my-database';
   // const projectId = 'my-project-id';
-  // Imports the Google Cloud Spanner client library
+
+  // Imports the Google Cloud client library
   const {Spanner} = require('@google-cloud/spanner');
 
-  // Instantiates a client
+  // creates a client
   const spanner = new Spanner({
     projectId: projectId,
   });
 
-  async function addAndDropNewDatabaseRole() {
-    // Gets a reference to a Cloud Spanner instance and database.
-    const instance = spanner.instance(instanceId);
-    const database = instance.database(databaseId);
+  const databaseAdminClient = spanner.getDatabaseAdminClient();
 
+  async function addAndDropNewDatabaseRole() {
     // Creates a new user defined role and grant permissions
     try {
       const request = [
@@ -51,7 +50,14 @@ function main(
         'CREATE ROLE parent',
         'GRANT SELECT ON TABLE Singers TO ROLE parent',
         'CREATE ROLE child',
         'GRANT ROLE parent TO ROLE child',
       ];
-      const [operation] = await database.updateSchema(request);
+      const [operation] = await databaseAdminClient.updateDatabaseDdl({
+        database: databaseAdminClient.databasePath(
+          projectId,
+          instanceId,
+          databaseId
+        ),
+        statements: request,
+      });
 
       console.log('Waiting for operation to complete...');
       await operation.promise();
@@ -65,7 +71,14 @@ function main(
     // A role can't be dropped until all its permissions are revoked.
     try {
       const request = ['REVOKE ROLE parent FROM ROLE child', 'DROP ROLE child'];
-      const [operation] = await database.updateSchema(request);
+      const [operation] = await databaseAdminClient.updateDatabaseDdl({
+        database: databaseAdminClient.databasePath(
+          projectId,
+          instanceId,
+          databaseId
+        ),
+        statements: request,
+      });
 
       console.log('Waiting for operation to complete...');
       await operation.promise();
@@ -74,8 +87,9 @@ function main(
     } catch (err) {
       console.error('ERROR:', err);
     } finally {
-      // Close the database when finished.
-      await database.close();
+      // Close the spanner client when finished.
+      // The databaseAdminClient does not require explicit closure. The closure of the Spanner client will automatically close the databaseAdminClient.
+      spanner.close();
     }
   }
   addAndDropNewDatabaseRole();
diff --git a/samples/archived/add-and-drop-new-database-role.js b/samples/archived/add-and-drop-new-database-role.js
new file mode 100644
index 000000000..7423430af
--- /dev/null
+++ b/samples/archived/add-and-drop-new-database-role.js
@@ -0,0 +1,89 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// sample-metadata:
+//   title: Add and drop new database role
+//   usage: node add-and-drop-new-database-role.js 
+
+'use strict';
+
+function main(
+  instanceId = 'my-instance',
+  databaseId = 'my-database',
+  projectId = 'my-project-id'
+) {
+  // [START spanner_add_and_drop_database_role]
+  /**
+   * TODO(developer): Uncomment these variables before running the sample.
+   */
+  // const instanceId = 'my-instance';
+  // const databaseId = 'my-database';
+  // const projectId = 'my-project-id';
+  // Imports the Google Cloud Spanner client library
+  const {Spanner} = require('@google-cloud/spanner');
+
+  // Instantiates a client
+  const spanner = new Spanner({
+    projectId: projectId,
+  });
+
+  async function addAndDropNewDatabaseRole() {
+    // Gets a reference to a Cloud Spanner instance and database.
+ const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + // Creates a new user defined role and grant permissions + try { + const request = [ + 'CREATE ROLE parent', + 'GRANT SELECT ON TABLE Singers TO ROLE parent', + 'CREATE ROLE child', + 'GRANT ROLE parent TO ROLE child', + ]; + const [operation] = await database.updateSchema(request); + + console.log('Waiting for operation to complete...'); + await operation.promise(); + + console.log('Created roles child and parent and granted privileges'); + } catch (err) { + console.error('ERROR:', err); + } + + // Revoke permissions and drop child role. + // A role can't be dropped until all its permissions are revoked. + try { + const request = ['REVOKE ROLE parent FROM ROLE child', 'DROP ROLE child']; + const [operation] = await database.updateSchema(request); + + console.log('Waiting for operation to complete...'); + await operation.promise(); + + console.log('Revoked privileges and dropped role child'); + } catch (err) { + console.error('ERROR:', err); + } finally { + // Close the database when finished. + await database.close(); + } + } + addAndDropNewDatabaseRole(); + // [END spanner_add_and_drop_database_role] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/backups-cancel.js b/samples/archived/backups-cancel.js new file mode 100644 index 000000000..b33cf26d8 --- /dev/null +++ b/samples/archived/backups-cancel.js @@ -0,0 +1,69 @@ +/** + * Copyright 2020 Google LLC + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +async function cancelBackup(instanceId, databaseId, backupId, projectId) { + // [START spanner_cancel_backup_create] + // Imports the Google Cloud client library and precise date library + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + // const backupId = 'my-backup'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance and database + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const backup = instance.backup(backupId); + + // Creates a new backup of the database + try { + console.log(`Creating backup of database ${database.formattedName_}.`); + const databasePath = database.formattedName_; + // Expire backup one day in the future + const expireTime = Date.now() + 1000 * 60 * 60 * 24; + const [, operation] = await backup.create({ + databasePath: databasePath, + expireTime: expireTime, + }); + + // Cancel the backup + await operation.cancel(); + + console.log('Backup cancelled.'); + } catch (err) { + console.error('ERROR:', err); + } finally { + // Delete backup in case it got created before the cancel operation + await backup.delete(); + + // Close the database when finished. + await database.close(); + } + // [END spanner_cancel_backup_create] +} + +module.exports.cancelBackup = cancelBackup; diff --git a/samples/archived/backups-copy.js b/samples/archived/backups-copy.js new file mode 100644 index 000000000..9636be972 --- /dev/null +++ b/samples/archived/backups-copy.js @@ -0,0 +1,97 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// sample-metadata: +// title: Copies a source backup +// usage: node spannerCopyBackup + +'use strict'; + +function main( + instanceId = 'my-instance', + backupId = 'my-backup', + sourceBackupPath = 'projects/my-project-id/instances/my-source-instance/backups/my-source-backup', + projectId = 'my-project-id' +) { + // [START spanner_copy_backup] + /** + * TODO(developer): Uncomment these variables before running the sample. 
+ */ + // const instanceId = 'my-instance'; + // const backupId = 'my-backup', + // const sourceBackupPath = 'projects/my-project-id/instances/my-source-instance/backups/my-source-backup', + // const projectId = 'my-project-id'; + + // Imports the Google Cloud Spanner client library + const {Spanner} = require('@google-cloud/spanner'); + const {PreciseDate} = require('@google-cloud/precise-date'); + + // Instantiates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + async function spannerCopyBackup() { + // Gets a reference to a Cloud Spanner instance and backup + const instance = spanner.instance(instanceId); + + // Expire copy backup 14 days in the future + const expireTime = Spanner.timestamp( + Date.now() + 1000 * 60 * 60 * 24 * 14 + ).toStruct(); + + // Copy the source backup + try { + console.log(`Creating copy of the source backup ${sourceBackupPath}.`); + const [, operation] = await instance.copyBackup( + sourceBackupPath, + backupId, + { + expireTime: expireTime, + } + ); + + console.log( + `Waiting for backup copy ${ + instance.backup(backupId).formattedName_ + } to complete...` + ); + await operation.promise(); + + // Verify the copy backup is ready + const copyBackup = instance.backup(backupId); + const [copyBackupInfo] = await copyBackup.getMetadata(); + if (copyBackupInfo.state === 'READY') { + console.log( + `Backup copy ${copyBackupInfo.name} of size ` + + `${copyBackupInfo.sizeBytes} bytes was created at ` + + `${new PreciseDate(copyBackupInfo.createTime).toISOString()} ` + + 'with version time ' + + `${new PreciseDate(copyBackupInfo.versionTime).toISOString()}` + ); + } else { + console.error('ERROR: Copy of backup is not ready.'); + } + } catch (err) { + console.error('ERROR:', err); + } + } + spannerCopyBackup(); + // [END spanner_copy_backup] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/backups-create-with-encryption-key.js b/samples/archived/backups-create-with-encryption-key.js new file mode 100644 index 000000000..96433971e --- /dev/null +++ b/samples/archived/backups-create-with-encryption-key.js @@ -0,0 +1,92 @@ +/** + * Copyright 2021 Google LLC + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +async function createBackupWithEncryptionKey( + instanceId, + databaseId, + backupId, + projectId, + keyName +) { + // [START spanner_create_backup_with_encryption_key] + // Imports the Google Cloud client library and precise date library + const {Spanner} = require('@google-cloud/spanner'); + const {PreciseDate} = require('@google-cloud/precise-date'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + // const backupId = 'my-backup'; + // const versionTime = Date.now() - 1000 * 60 * 60 * 24; // One day ago + // const keyName = + // 'projects/my-project-id/my-region/keyRings/my-key-ring/cryptoKeys/my-key'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance and database + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const backup = instance.backup(backupId); + + // Creates a new backup of the database + try { + console.log(`Creating backup of database ${database.formattedName_}.`); + const databasePath = database.formattedName_; + // Expire backup 14 days in the future + const expireTime = Date.now() + 1000 * 60 * 60 * 24 * 14; + // Create a backup of the state of the database at the current time. + const [, operation] = await backup.create({ + databasePath: databasePath, + expireTime: expireTime, + encryptionConfig: { + encryptionType: 'CUSTOMER_MANAGED_ENCRYPTION', + kmsKeyName: keyName, + }, + }); + + console.log(`Waiting for backup ${backup.formattedName_} to complete...`); + await operation.promise(); + + // Verify backup is ready + const [backupInfo] = await backup.getMetadata(); + if (backupInfo.state === 'READY') { + console.log( + `Backup ${backupInfo.name} of size ` + + `${backupInfo.sizeBytes} bytes was created at ` + + `${new PreciseDate(backupInfo.createTime).toISOString()} ` + + `using encryption key ${backupInfo.encryptionInfo.kmsKeyVersion}` + ); + } else { + console.error('ERROR: Backup is not ready.'); + } + } catch (err) { + console.error('ERROR:', err); + } finally { + // Close the database when finished. + await database.close(); + } + // [END spanner_create_backup_with_encryption_key] +} + +module.exports.createBackupWithEncryptionKey = createBackupWithEncryptionKey; diff --git a/samples/archived/backups-create.js b/samples/archived/backups-create.js new file mode 100644 index 000000000..6a8af90e4 --- /dev/null +++ b/samples/archived/backups-create.js @@ -0,0 +1,88 @@ +/** + * Copyright 2021 Google LLC + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +async function createBackup( + instanceId, + databaseId, + backupId, + projectId, + versionTime +) { + // [START spanner_create_backup] + // Imports the Google Cloud client library and precise date library + const {Spanner} = require('@google-cloud/spanner'); + const {PreciseDate} = require('@google-cloud/precise-date'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + // const backupId = 'my-backup'; + // const versionTime = Date.now() - 1000 * 60 * 60 * 24; // One day ago + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance and database + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const backup = instance.backup(backupId); + + // Creates a new backup of the database + try { + console.log(`Creating backup of database ${database.formattedName_}.`); + const databasePath = database.formattedName_; + // Expire backup 14 days in the future + const expireTime = Date.now() + 1000 * 60 * 60 * 24 * 14; + // Create a backup of the state of the database at the current time. + const [, operation] = await backup.create({ + databasePath: databasePath, + expireTime: expireTime, + versionTime: versionTime, + }); + + console.log(`Waiting for backup ${backup.formattedName_} to complete...`); + await operation.promise(); + + // Verify backup is ready + const [backupInfo] = await backup.getMetadata(); + if (backupInfo.state === 'READY') { + console.log( + `Backup ${backupInfo.name} of size ` + + `${backupInfo.sizeBytes} bytes was created at ` + + `${new PreciseDate(backupInfo.createTime).toISOString()} ` + + 'for version of database at ' + + `${new PreciseDate(backupInfo.versionTime).toISOString()}` + ); + } else { + console.error('ERROR: Backup is not ready.'); + } + } catch (err) { + console.error('ERROR:', err); + } finally { + // Close the database when finished. + await database.close(); + } + // [END spanner_create_backup] +} + +module.exports.createBackup = createBackup; diff --git a/samples/archived/backups-delete.js b/samples/archived/backups-delete.js new file mode 100644 index 000000000..d1e8a2bfb --- /dev/null +++ b/samples/archived/backups-delete.js @@ -0,0 +1,54 @@ +/** + * Copyright 2020 Google LLC + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +async function deleteBackup(instanceId, databaseId, backupId, projectId) { + // [START spanner_delete_backup] + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + // const backupId = 'my-backup'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance and backup + const instance = spanner.instance(instanceId); + const backup = instance.backup(backupId); + + // Delete the backup + console.log(`Deleting backup ${backupId}.`); + await backup.delete(); + + // Verify backup no longer exists + const exists = await backup.exists(); + if (exists) { + console.error('Error: backup still exists.'); + } else { + console.log('Backup deleted.'); + } + // [END spanner_delete_backup] +} + +module.exports.deleteBackup = deleteBackup; diff --git a/samples/archived/backups-get-database-operations.js b/samples/archived/backups-get-database-operations.js new file mode 100644 index 000000000..d2c00ca54 --- /dev/null +++ b/samples/archived/backups-get-database-operations.js @@ -0,0 +1,60 @@ +/** + * Copyright 2020 Google LLC + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +async function getDatabaseOperations(instanceId, projectId) { + // [START spanner_list_database_operations] + // Imports the Google Cloud client library + const {Spanner, protos} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance + const instance = spanner.instance(instanceId); + + // List database operations + try { + const [databaseOperations] = await instance.getDatabaseOperations({ + filter: + '(metadata.@type:type.googleapis.com/google.spanner.admin.database.v1.OptimizeRestoredDatabaseMetadata)', + }); + console.log('Optimize Database Operations:'); + databaseOperations.forEach(databaseOperation => { + const metadata = + protos.google.spanner.admin.database.v1.OptimizeRestoredDatabaseMetadata.decode( + databaseOperation.metadata.value + ); + console.log( + `Database ${metadata.name} restored from backup is ` + + `${metadata.progress.progressPercent}% optimized.` + ); + }); + } catch (err) { + console.error('ERROR:', err); + } + // [END spanner_list_database_operations] +} + +module.exports.getDatabaseOperations = getDatabaseOperations; diff --git a/samples/archived/backups-get-operations.js b/samples/archived/backups-get-operations.js new file mode 100644 index 000000000..9369b3e74 --- /dev/null +++ b/samples/archived/backups-get-operations.js @@ -0,0 +1,94 @@ +/** + * Copyright 2020 Google LLC + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +async function getBackupOperations( + instanceId, + databaseId, + backupId, + projectId +) { + // [START spanner_list_backup_operations] + // Imports the Google Cloud client library + const {Spanner, protos} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const databaseId = 'my-database'; + // const backupId = 'my-backup'; + // const instanceId = 'my-instance'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance + const instance = spanner.instance(instanceId); + + // List create backup operations + try { + const [backupOperations] = await instance.getBackupOperations({ + filter: + '(metadata.@type:type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata) ' + + `AND (metadata.database:${databaseId})`, + }); + console.log('Create Backup Operations:'); + backupOperations.forEach(backupOperation => { + const metadata = + protos.google.spanner.admin.database.v1.CreateBackupMetadata.decode( + backupOperation.metadata.value + ); + console.log( + `Backup ${metadata.name} on database ${metadata.database} is ` + + `${metadata.progress.progressPercent}% complete.` + ); + }); + } catch (err) { + console.error('ERROR:', err); + } + + // List copy backup operations + try { + console.log( + '(metadata.@type:type.googleapis.com/google.spanner.admin.database.v1.CopyBackupMetadata) ' + + `AND (metadata.source_backup:${backupId})` + ); + const [backupOperations] = await instance.getBackupOperations({ + filter: + '(metadata.@type:type.googleapis.com/google.spanner.admin.database.v1.CopyBackupMetadata) ' + + `AND (metadata.source_backup:${backupId})`, + }); + console.log('Copy Backup Operations:'); + backupOperations.forEach(backupOperation => { + const metadata = + protos.google.spanner.admin.database.v1.CopyBackupMetadata.decode( + backupOperation.metadata.value + ); + console.log( + `Backup ${metadata.name} copied from source backup ${metadata.sourceBackup} is ` + + `${metadata.progress.progressPercent}% complete.` + ); + }); + } catch (err) { + console.error('ERROR:', err); + } + // [END spanner_list_backup_operations] +} + +module.exports.getBackupOperations = getBackupOperations; diff --git a/samples/archived/backups-get.js b/samples/archived/backups-get.js new file mode 100644 index 000000000..41cf88fee --- /dev/null +++ b/samples/archived/backups-get.js @@ -0,0 +1,115 @@ +/** + * Copyright 2024 Google LLC + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +async function getBackups(instanceId, databaseId, backupId, projectId) { + // [START spanner_list_backups] + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + // const backupId = 'my-backup'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance + const instance = spanner.instance(instanceId); + + try { + // List all backups + const [allBackups] = await instance.getBackups(); + console.log('All backups:'); + allBackups.forEach(backup => { + console.log(backup.id); + }); + + // List backups filtered by backup name + const [backupsByName] = await instance.getBackups({ + filter: `Name:${backupId}`, + }); + console.log('Backups matching backup name:'); + backupsByName.forEach(backup => { + console.log(backup.id); + }); + + // List backups expiring within 30 days + const expireTime = new Date(); + expireTime.setDate(expireTime.getDate() + 30); + const [backupsByExpiry] = await instance.getBackups({ + filter: `expire_time < "${expireTime.toISOString()}"`, + }); + console.log('Backups expiring within 30 days:'); + backupsByExpiry.forEach(backup => { + console.log(backup.id); + }); + + // List backups filtered by database name + const [backupsByDbName] = await instance.getBackups({ + filter: `Database:${databaseId}`, + }); + console.log('Backups matching database name:'); + backupsByDbName.forEach(backup => { + console.log(backup.id); + }); + + // List backups filtered by backup size + const [backupsBySize] = await instance.getBackups({ + filter: 'size_bytes > 100', + }); + console.log('Backups filtered by size:'); + backupsBySize.forEach(backup => { + console.log(backup.id); + }); + + // List backups that are ready that were created after a certain time + const createTime = new Date(); + createTime.setDate(createTime.getDate() - 1); + const [backupsByCreateTime] = await instance.getBackups({ + filter: `(state:READY) AND (create_time >= "${createTime.toISOString()}")`, + }); + console.log('Ready backups filtered by create time:'); + backupsByCreateTime.forEach(backup => { + console.log(backup.id); + }); + + // List backups using pagination + let getBackupsOptions = { + pageSize: 3, + gaxOptions: {autoPaginate: false}, + }; + console.log('Get backups paginated:'); + do { + const [backups, nextQuery] = await instance.getBackups(getBackupsOptions); + backups.forEach(backup => { + console.log(backup.id); + }); + getBackupsOptions = nextQuery; + } while (getBackupsOptions); + } catch (err) { + console.error('ERROR:', err); + } + // [END spanner_list_backups] +} + +module.exports.getBackups = getBackups; diff --git a/samples/archived/backups-restore-with-encryption-key.js b/samples/archived/backups-restore-with-encryption-key.js new file mode 100644 index 000000000..c9ebb4003 --- /dev/null +++ b/samples/archived/backups-restore-with-encryption-key.js @@ -0,0 +1,77 @@ +/** + * Copyright 2021 Google LLC + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +async function restoreBackupWithEncryptionKey( + instanceId, + databaseId, + backupId, + projectId, + keyName +) { + // [START spanner_restore_backup_with_encryption_key] + // Imports the Google Cloud client library and precise date library + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + // const backupId = 'my-backup'; + // const keyName = + // 'projects/my-project-id/my-region/keyRings/my-key-ring/cryptoKeys/my-key'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance and database + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + // Restore the database + console.log( + `Restoring database ${database.formattedName_} from backup ${backupId}.` + ); + const [, restoreOperation] = await database.restore( + `projects/${projectId}/instances/${instanceId}/backups/${backupId}`, + { + encryptionConfig: { + encryptionType: 'CUSTOMER_MANAGED_ENCRYPTION', + kmsKeyName: keyName, + }, + } + ); + + // Wait for restore to complete + console.log('Waiting for database restore to complete...'); + await restoreOperation.promise(); + + console.log('Database restored from backup.'); + const restoreInfo = await database.getRestoreInfo(); + const [data] = await database.get(); + console.log( + `Database ${restoreInfo.backupInfo.sourceDatabase} was restored ` + + `to ${databaseId} from backup ${restoreInfo.backupInfo.backup} ` + + `using encryption key ${data.metadata.encryptionConfig.kmsKeyName}.` + ); + // [END spanner_restore_backup_with_encryption_key] +} + +module.exports.restoreBackupWithEncryptionKey = restoreBackupWithEncryptionKey; diff --git a/samples/archived/backups-restore.js b/samples/archived/backups-restore.js new file mode 100644 index 000000000..467c7049e --- /dev/null +++ b/samples/archived/backups-restore.js @@ -0,0 +1,64 @@ +/** + * Copyright 2021 Google LLC + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +'use strict'; + +async function restoreBackup(instanceId, databaseId, backupId, projectId) { + // [START spanner_restore_backup] + // Imports the Google Cloud client library and precise date library + const {Spanner} = require('@google-cloud/spanner'); + const {PreciseDate} = require('@google-cloud/precise-date'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + // const backupId = 'my-backup'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance and database + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + // Restore the database + console.log( + `Restoring database ${database.formattedName_} from backup ${backupId}.` + ); + const [, restoreOperation] = await database.restore( + `projects/${projectId}/instances/${instanceId}/backups/${backupId}` + ); + + // Wait for restore to complete + console.log('Waiting for database restore to complete...'); + await restoreOperation.promise(); + + console.log('Database restored from backup.'); + const restoreInfo = await database.getRestoreInfo(); + console.log( + `Database ${restoreInfo.backupInfo.sourceDatabase} was restored ` + + `to ${databaseId} from backup ${restoreInfo.backupInfo.backup} ` + + 'with version time ' + + `${new PreciseDate(restoreInfo.backupInfo.versionTime).toISOString()}.` + ); + // [END spanner_restore_backup] +} + +module.exports.restoreBackup = restoreBackup; diff --git a/samples/archived/backups-update.js b/samples/archived/backups-update.js new file mode 100644 index 000000000..639513821 --- /dev/null +++ b/samples/archived/backups-update.js @@ -0,0 +1,62 @@ +/** + * Copyright 2020 Google LLC + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +async function updateBackup(instanceId, backupId, projectId) { + // [START spanner_update_backup] + // Imports the Google Cloud client library and precise date library + const {Spanner} = require('@google-cloud/spanner'); + const {PreciseDate} = require('@google-cloud/precise-date'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const backupId = 'my-backup'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance and backup + const instance = spanner.instance(instanceId); + const backup = instance.backup(backupId); + + // Read backup metadata and update expiry time + try { + const currentExpireTime = await backup.getExpireTime(); + const maxExpireTime = backup.metadata.maxExpireTime; + const wantExpireTime = new PreciseDate(currentExpireTime); + wantExpireTime.setDate(wantExpireTime.getDate() + 1); + // New expire time should be less than the max expire time + const min = (currentExpireTime, maxExpireTime) => + currentExpireTime < maxExpireTime ? currentExpireTime : maxExpireTime; + const newExpireTime = new PreciseDate(min(wantExpireTime, maxExpireTime)); + console.log( + `Backup ${backupId} current expire time: ${currentExpireTime.toISOString()}` + ); + console.log(`Updating expire time to ${newExpireTime.toISOString()}`); + await backup.updateExpireTime(newExpireTime); + console.log('Expire time updated.'); + } catch (err) { + console.error('ERROR:', err); + } + // [END spanner_update_backup] +} + +module.exports.updateBackup = updateBackup; diff --git a/samples/archived/backups.js b/samples/archived/backups.js new file mode 100644 index 000000000..29aeb6117 --- /dev/null +++ b/samples/archived/backups.js @@ -0,0 +1,153 @@ +/** + * Copyright 2024 Google LLC + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +'use strict'; + +const {createBackup} = require('./backups-create'); +const { + createBackupWithEncryptionKey, +} = require('./backups-create-with-encryption-key'); +const {cancelBackup} = require('./backups-cancel'); +const {getBackups} = require('./backups-get'); +const {getBackupOperations} = require('./backups-get-operations'); +const {getDatabaseOperations} = require('./backups-get-database-operations'); +const {updateBackup} = require('./backups-update'); +const {restoreBackup} = require('./backups-restore'); +const { + restoreBackupWithEncryptionKey, +} = require('./backups-restore-with-encryption-key'); +const {deleteBackup} = require('./backups-delete'); + +require('yargs') + .demand(1) + .command( + 'createBackup ', + 'Creates a backup of a Cloud Spanner database.', + {}, + opts => + createBackup( + opts.instanceName, + opts.databaseName, + opts.backupName, + opts.projectId, + Date.parse(opts.versionTime) + ) + ) + .command( + 'createBackupWithEncryptionKey ', + 'Creates a backup of a Cloud Spanner database using an encryption key.', + {}, + opts => + createBackupWithEncryptionKey( + opts.instanceName, + opts.databaseName, + opts.backupName, + opts.projectId, + opts.keyName + ) + ) + .command( + 'cancelBackup ', + 'Creates and cancels a backup of a Cloud Spanner database.', + {}, + opts => + cancelBackup( + opts.instanceName, + opts.databaseName, + opts.backupName, + opts.projectId + ) + ) + .command( + 'getBackups ', + 'Lists backups in the instance with filters.', + {}, + opts => + getBackups( + opts.instanceName, + opts.databaseName, + opts.backupName, + opts.projectId + ) + ) + .command( + 'getBackupOperations ', + 'Lists all backup operations in the instance.', + {}, + opts => + getBackupOperations( + opts.instanceName, + opts.databaseName, + opts.backupName, + opts.projectId + ) + ) + .command( + 'getDatabaseOperations ', + 'Lists all database operations in the instance.', + {}, + opts => getDatabaseOperations(opts.instanceName, opts.projectId) + ) + .command( + 'updateBackup ', + 'Updates the expire time of a backup.', + {}, + opts => updateBackup(opts.instanceName, opts.backupName, opts.projectId) + ) + .command( + 'restoreBackup ', + 'Restores a Cloud Spanner database from a backup.', + {}, + opts => + restoreBackup( + opts.instanceName, + opts.databaseName, + opts.backupName, + opts.projectId + ) + ) + .command( + 'restoreBackupWithEncryptionKey ', + 'Restores a Cloud Spanner database from a backup with an encryption key.', + {}, + opts => + restoreBackupWithEncryptionKey( + opts.instanceName, + opts.databaseName, + opts.backupName, + opts.projectId, + opts.keyName + ) + ) + .command( + 'deleteBackup ', + 'Deletes a backup.', + {}, + opts => + deleteBackup( + opts.instanceName, + opts.databaseName, + opts.backupName, + opts.projectId + ) + ) + .example( + 'node $0 createBackup "my-instance" "my-database" "my-backup" "my-project-id"' + ) + .wrap(120) + .recommendCommands() + .epilogue('For more information, see https://cloud.google.com/spanner/docs') + .strict() + .help().argv; diff --git a/samples/archived/database-create-with-default-leader.js b/samples/archived/database-create-with-default-leader.js new file mode 100644 index 000000000..7f804451c --- /dev/null +++ b/samples/archived/database-create-with-default-leader.js @@ -0,0 +1,88 @@ +/** + * Copyright 2021 Google LLC + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// sample-metadata: +// title: Creates a new database with a specific default leader +// usage: node database-create-with-default-leader.js + +'use strict'; + +function main(instanceId, databaseId, defaultLeader, projectId) { + // [START spanner_create_database_with_default_leader] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance-id'; + // const databaseId = 'my-database-id'; + // const defaultLeader = 'my-default-leader'; + + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + // Gets a reference to a Cloud Spanner instance and a database. The database does not need to exist. + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + async function createDatabaseWithDefaultLeader() { + // Create a new database with an extra statement which will alter the + // database after creation to set the default leader. + console.log(`Creating database ${database.formattedName_}.`); + const createSingersTableStatement = ` + CREATE TABLE Singers ( + SingerId INT64 NOT NULL, + FirstName STRING(1024), + LastName STRING(1024), + SingerInfo BYTES(MAX) + ) PRIMARY KEY (SingerId)`; + const createAlbumsStatement = ` + CREATE TABLE Albums ( + SingerId INT64 NOT NULL, + AlbumId INT64 NOT NULL, + AlbumTitle STRING(MAX) + ) PRIMARY KEY (SingerId, AlbumId), + INTERLEAVE IN PARENT Singers ON DELETE CASCADE`; + + // Default leader is one of the possible values in the leaderOptions field of the + // instance config of the instance where the database is created. + const setDefaultLeaderStatement = ` + ALTER DATABASE \`${databaseId}\` + SET OPTIONS (default_leader = '${defaultLeader}')`; + const [, operation] = await database.create({ + extraStatements: [ + createSingersTableStatement, + createAlbumsStatement, + setDefaultLeaderStatement, + ], + }); + + console.log(`Waiting for creation of ${database.id} to complete...`); + await operation.promise(); + console.log( + `Created database ${databaseId} with default leader ${defaultLeader}.` + ); + } + createDatabaseWithDefaultLeader(); + // [END spanner_create_database_with_default_leader] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/database-create-with-encryption-key.js b/samples/archived/database-create-with-encryption-key.js new file mode 100644 index 000000000..01d677978 --- /dev/null +++ b/samples/archived/database-create-with-encryption-key.js @@ -0,0 +1,71 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +async function createDatabaseWithEncryptionKey( + instanceId, + databaseId, + projectId, + keyName +) { + // [START spanner_create_database_with_encryption_key] + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + // const keyName = + // 'projects/my-project-id/my-region/keyRings/my-key-ring/cryptoKeys/my-key'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance + const instance = spanner.instance(instanceId); + + const request = { + encryptionConfig: { + kmsKeyName: keyName, + }, + }; + + // Creates a database + const [database, operation] = await instance.createDatabase( + databaseId, + request + ); + + console.log(`Waiting for operation on ${database.id} to complete...`); + await operation.promise(); + + console.log(`Created database ${databaseId} on instance ${instanceId}.`); + + // Get encryption key + const [data] = await database.get(); + + console.log( + `Database encrypted with key ${data.metadata.encryptionConfig.kmsKeyName}.` + ); + // [END spanner_create_database_with_encryption_key] +} + +module.exports.createDatabaseWithEncryptionKey = + createDatabaseWithEncryptionKey; diff --git a/samples/archived/database-create-with-version-retention-period.js b/samples/archived/database-create-with-version-retention-period.js new file mode 100644 index 000000000..33cfd5d5e --- /dev/null +++ b/samples/archived/database-create-with-version-retention-period.js @@ -0,0 +1,72 @@ +/** + * Copyright 2021 Google LLC + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +async function createDatabaseWithVersionRetentionPeriod( + instanceId, + databaseId, + projectId +) { + // [START spanner_create_database_with_version_retention_period] + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + try { + // Create a new database with an extra statement which will alter the + // database after creation to set the version retention period. + console.log(`Creating database ${instance.formattedName_}.`); + const versionRetentionStatement = ` + ALTER DATABASE \`${databaseId}\` + SET OPTIONS (version_retention_period = '1d')`; + const [, operation] = await database.create({ + extraStatements: [versionRetentionStatement], + }); + + console.log(`Waiting for operation on ${database.id} to complete...`); + await operation.promise(); + console.log(` + Created database ${databaseId} with version retention period.`); + + const [data] = await database.get(); + console.log( + `Version retention period: ${data.metadata.versionRetentionPeriod}` + ); + const earliestVersionTime = Spanner.timestamp( + data.metadata.earliestVersionTime + ); + console.log(`Earliest version time: ${earliestVersionTime}`); + } catch (err) { + console.error('ERROR:', err); + } + // [END spanner_create_database_with_version_retention_period] +} + +module.exports.createDatabaseWithVersionRetentionPeriod = + createDatabaseWithVersionRetentionPeriod; diff --git a/samples/archived/database-get-ddl.js b/samples/archived/database-get-ddl.js new file mode 100644 index 000000000..65efb3a25 --- /dev/null +++ b/samples/archived/database-get-ddl.js @@ -0,0 +1,58 @@ +/** + * Copyright 2021 Google LLC + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// sample-metadata: +// title: Gets the schema definition of an existing database +// usage: node database-get-ddl.js + +'use strict'; + +function main(instanceId, databaseId, projectId) { + // [START spanner_get_database_ddl] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance-id'; + // const databaseId = 'my-database-id'; + + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + // Gets a reference to a Cloud Spanner instance and a database. + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + async function getDatabaseDdl() { + // Get the schema definition of the database. 
+ const [ddlStatements] = await database.getSchema(); + + console.log(`Retrieved database DDL for ${database.formattedName_}:`); + ddlStatements.forEach(statement => { + console.log(`${statement};\n`); + }); + } + getDatabaseDdl(); + // [END spanner_get_database_ddl] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/database-get-default-leader.js b/samples/archived/database-get-default-leader.js new file mode 100644 index 000000000..fe11365a9 --- /dev/null +++ b/samples/archived/database-get-default-leader.js @@ -0,0 +1,69 @@ +/** + * Copyright 2021 Google LLC + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// sample-metadata: +// title: Gets the default leader option of an existing database +// usage: node database-get-default-leader.js + +'use strict'; + +function main(instanceId, databaseId, projectId) { + // [START spanner_query_information_schema_database_options] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance-id'; + // const databaseId = 'my-database-id'; + + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + // Gets a reference to a Cloud Spanner instance and a database. + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + async function getDatabaseDdl() { + // Get the default leader option for the database. + const [rows] = await database.run({ + sql: ` + SELECT s.OPTION_NAME, s.OPTION_VALUE + FROM INFORMATION_SCHEMA.DATABASE_OPTIONS s + WHERE s.OPTION_NAME = 'default_leader'`, + json: true, + }); + if (rows.length > 0) { + const option = rows[0]; + console.log( + `The ${option.OPTION_NAME} for ${databaseId} is ${option.OPTION_VALUE}` + ); + } else { + console.log( + `Database ${databaseId} does not have a value for option 'default_leader'` + ); + } + } + getDatabaseDdl(); + // [END spanner_query_information_schema_database_options] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/database-update-default-leader.js b/samples/archived/database-update-default-leader.js new file mode 100644 index 000000000..62fd80d18 --- /dev/null +++ b/samples/archived/database-update-default-leader.js @@ -0,0 +1,63 @@ +/** + * Copyright 2021 Google LLC + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// sample-metadata: +// title: Updates the default leader of an existing database +// usage: node database-update-default-leader.js + +'use strict'; + +function main(instanceId, databaseId, defaultLeader, projectId) { + // [START spanner_update_database_with_default_leader] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance-id'; + // const databaseId = 'my-database-id'; + // const defaultLeader = 'my-default-leader'; + + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + // Gets a reference to a Cloud Spanner instance and a database. + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + async function updateDatabaseWithDefaultLeader() { + console.log(`Updating database ${database.formattedName_}.`); + const setDefaultLeaderStatement = ` + ALTER DATABASE \`${databaseId}\` + SET OPTIONS (default_leader = '${defaultLeader}')`; + const [operation] = await database.updateSchema(setDefaultLeaderStatement); + + console.log(`Waiting for updating of ${database.id} to complete...`); + await operation.promise(); + console.log( + `Updated database ${databaseId} with default leader ${defaultLeader}.` + ); + } + updateDatabaseWithDefaultLeader(); + // [END spanner_update_database_with_default_leader] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/database-update.js b/samples/archived/database-update.js new file mode 100644 index 000000000..583eda219 --- /dev/null +++ b/samples/archived/database-update.js @@ -0,0 +1,72 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// sample-metadata: +// title: Updates a Cloud Spanner Database. +// usage: node database-update.js + +'use strict'; + +function main( + instanceId = 'my-instance', + databaseId = 'my-database', + projectId = 'my-project-id' +) { + // [START spanner_update_database] + /** + * TODO(developer): Uncomment these variables before running the sample. 
+ */
+ // const instanceId = 'my-instance';
+ // const databaseId = 'my-database';
+ // const projectId = 'my-project-id';
+
+ // Imports the Google Cloud Spanner client library
+ const {Spanner} = require('@google-cloud/spanner');
+
+ // Instantiates a client
+ const spanner = new Spanner({
+ projectId: projectId,
+ });
+
+ async function updateDatabase() {
+ // Gets a reference to a Cloud Spanner instance and database
+ const instance = spanner.instance(instanceId);
+ const database = instance.database(databaseId);
+
+ try {
+ console.log(`Updating database ${database.id}.`);
+ const [operation] = await database.setMetadata({
+ enableDropProtection: true,
+ });
+ console.log(
+ `Waiting for update operation for ${database.id} to complete...`
+ );
+ await operation.promise();
+ console.log(`Updated database ${database.id}.`);
+ } catch (err) {
+ console.log('ERROR:', err);
+ } finally {
+ // Close the database when finished.
+ database.close();
+ }
+ }
+ updateDatabase();
+ // [END spanner_update_database]
+}
+
+process.on('unhandledRejection', err => {
+ console.error(err.message);
+ process.exitCode = 1;
+});
+main(...process.argv.slice(2));
diff --git a/samples/archived/datatypes.js b/samples/archived/datatypes.js
new file mode 100644
index 000000000..c3d8acdac
--- /dev/null
+++ b/samples/archived/datatypes.js
@@ -0,0 +1,765 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict';
+
+async function createVenuesTable(instanceId, databaseId, projectId) {
+ // [START spanner_create_table_with_datatypes]
+ // Imports the Google Cloud client library
+ const {Spanner} = require('@google-cloud/spanner');
+
+ /**
+ * TODO(developer): Uncomment the following lines before running the sample.
+ */
+ // const projectId = 'my-project-id';
+ // const instanceId = 'my-instance';
+ // const databaseId = 'my-database';
+
+ // Creates a client
+ const spanner = new Spanner({
+ projectId: projectId,
+ });
+
+ // Gets a reference to a Cloud Spanner instance.
+ const instance = spanner.instance(instanceId);
+ const database = instance.database(databaseId);
+
+ const request = [
+ `CREATE TABLE Venues (
+ VenueId INT64 NOT NULL,
+ VenueName STRING(100),
+ VenueInfo BYTES(MAX),
+ Capacity INT64,
+ AvailableDates ARRAY<DATE>,
+ LastContactDate Date,
+ OutdoorVenue BOOL,
+ PopularityScore FLOAT64,
+ LastUpdateTime TIMESTAMP NOT NULL OPTIONS (allow_commit_timestamp=true)
+ ) PRIMARY KEY (VenueId)`,
+ ];
+
+ // Creates a table in an existing database.
+ const [operation] = await database.updateSchema(request);
+
+ console.log(`Waiting for operation on ${databaseId} to complete...`);
+
+ await operation.promise();
+
+ console.log(`Created table Venues in database ${databaseId}.`);
+ // [END spanner_create_table_with_datatypes]
+}
+
+async function insertData(instanceId, databaseId, projectId) {
+ // [START spanner_insert_datatypes_data]
+ // Imports the Google Cloud client library.
+ const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + + // Creates a client. + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance and database. + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + // Instantiate Spanner table objects. + const venuesTable = database.table('Venues'); + const exampleBytes1 = new Buffer.from('Hello World 1'); + const exampleBytes2 = new Buffer.from('Hello World 2'); + const exampleBytes3 = new Buffer.from('Hello World 3'); + const availableDates1 = ['2020-12-01', '2020-12-02', '2020-12-03']; + const availableDates2 = ['2020-11-01', '2020-11-05', '2020-11-15']; + const availableDates3 = ['2020-10-01', '2020-10-07']; + + // Note: Cloud Spanner interprets Node.js numbers as FLOAT64s, so they + // must be converted to strings before being inserted as INT64s. + const data = [ + { + VenueId: '4', + VenueName: 'Venue 4', + VenueInfo: exampleBytes1, + Capacity: '1800', + AvailableDates: availableDates1, + LastContactDate: '2018-09-02', + OutdoorVenue: false, + PopularityScore: Spanner.float(0.85543), + LastUpdateTime: 'spanner.commit_timestamp()', + }, + { + VenueId: '19', + VenueName: 'Venue 19', + VenueInfo: exampleBytes2, + Capacity: '6300', + AvailableDates: availableDates2, + LastContactDate: '2019-01-15', + OutdoorVenue: true, + PopularityScore: Spanner.float(0.98716), + LastUpdateTime: 'spanner.commit_timestamp()', + }, + { + VenueId: '42', + VenueName: 'Venue 42', + VenueInfo: exampleBytes3, + Capacity: '3000', + AvailableDates: availableDates3, + LastContactDate: '2018-10-01', + OutdoorVenue: false, + PopularityScore: Spanner.float(0.72598), + LastUpdateTime: 'spanner.commit_timestamp()', + }, + ]; + + // Inserts rows into the Venues table. + try { + await venuesTable.insert(data); + console.log('Inserted data.'); + } catch (err) { + console.error('ERROR:', err); + } finally { + // Close the database when finished. + database.close(); + } + // [END spanner_insert_datatypes_data] +} + +async function queryWithArray(instanceId, databaseId, projectId) { + // [START spanner_query_with_array_parameter] + // Imports the Google Cloud client library. + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + + // Creates a client. + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance and database. + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const fieldType = { + type: 'date', + }; + + const parentFieldType = { + type: 'array', + child: fieldType, + }; + + const exampleArray = ['2020-10-01', '2020-11-01']; + + const query = { + sql: `SELECT VenueId, VenueName, AvailableDate FROM Venues v, + UNNEST(v.AvailableDates) as AvailableDate + WHERE AvailableDate in UNNEST(@availableDates)`, + params: { + availableDates: exampleArray, + }, + types: { + availableDates: parentFieldType, + }, + }; + + // Queries rows from the Venues table. 
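+ // Note: because 'availableDates' is declared above as an array with a DATE child type,
+ // the plain date strings in exampleArray are bound as DATE values for the UNNEST comparison.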
+ try { + const [rows] = await database.run(query); + rows.forEach(row => { + const availableDate = row[2]['value']; + const json = row.toJSON(); + console.log( + `VenueId: ${json.VenueId}, VenueName: ${ + json.VenueName + }, AvailableDate: ${JSON.stringify(availableDate).substring(1, 11)}` + ); + }); + } catch (err) { + console.error('ERROR:', err); + } finally { + // Close the database when finished. + database.close(); + } + // [END spanner_query_with_array_parameter] +} + +async function queryWithBool(instanceId, databaseId, projectId) { + // [START spanner_query_with_bool_parameter] + // Imports the Google Cloud client library. + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance and database. + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const fieldType = { + type: 'bool', + }; + + const exampleBool = true; + + const query = { + sql: `SELECT VenueId, VenueName, OutdoorVenue FROM Venues + WHERE OutdoorVenue = @outdoorVenue`, + params: { + outdoorVenue: exampleBool, + }, + types: { + outdoorVenue: fieldType, + }, + }; + + // Queries rows from the Venues table. + try { + const [rows] = await database.run(query); + + rows.forEach(row => { + const json = row.toJSON(); + console.log( + `VenueId: ${json.VenueId}, VenueName: ${json.VenueName},` + + ` OutdoorVenue: ${json.OutdoorVenue}` + ); + }); + } catch (err) { + console.error('ERROR:', err); + } finally { + // Close the database when finished. + database.close(); + } + // [END spanner_query_with_bool_parameter] +} + +async function queryWithBytes(instanceId, databaseId, projectId) { + // [START spanner_query_with_bytes_parameter] + // Imports the Google Cloud client library. + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance and database. + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const fieldType = { + type: 'bytes', + }; + + const exampleBytes = new Buffer.from('Hello World 1'); + + const query = { + sql: `SELECT VenueId, VenueName FROM Venues + WHERE VenueInfo = @venueInfo`, + params: { + venueInfo: exampleBytes, + }, + types: { + venueInfo: fieldType, + }, + }; + + // Queries rows from the Venues table. + try { + const [rows] = await database.run(query); + + rows.forEach(row => { + const json = row.toJSON(); + console.log(`VenueId: ${json.VenueId}, VenueName: ${json.VenueName}`); + }); + } catch (err) { + console.error('ERROR:', err); + } finally { + // Close the database when finished. + database.close(); + } + // [END spanner_query_with_bytes_parameter] +} + +async function queryWithDate(instanceId, databaseId, projectId) { + // [START spanner_query_with_date_parameter] + // Imports the Google Cloud client library. 
+ const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + + // Creates a client. + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance and database. + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const fieldType = { + type: 'date', + }; + + const exampleDate = '2019-01-01'; + + const query = { + sql: `SELECT VenueId, VenueName, LastContactDate FROM Venues + WHERE LastContactDate < @lastContactDate`, + params: { + lastContactDate: exampleDate, + }, + types: { + lastContactDate: fieldType, + }, + }; + + // Queries rows from the Venues table. + try { + const [rows] = await database.run(query); + + rows.forEach(row => { + const date = row[2]['value']; + const json = row.toJSON(); + console.log( + `VenueId: ${json.VenueId}, VenueName: ${json.VenueName},` + + ` LastContactDate: ${JSON.stringify(date).substring(1, 11)}` + ); + }); + } catch (err) { + console.error('ERROR:', err); + } finally { + // Close the database when finished. + database.close(); + } + // [END spanner_query_with_date_parameter] +} + +async function queryWithFloat(instanceId, databaseId, projectId) { + // [START spanner_query_with_float_parameter] + // Imports the Google Cloud client library. + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance and database. + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const fieldType = { + type: 'float64', + }; + + const exampleFloat = Spanner.float(0.8); + + const query = { + sql: `SELECT VenueId, VenueName, PopularityScore FROM Venues + WHERE PopularityScore > @popularityScore`, + params: { + popularityScore: exampleFloat, + }, + types: { + popularityScore: fieldType, + }, + }; + + // Queries rows from the Venues table. + try { + const [rows] = await database.run(query); + + rows.forEach(row => { + const json = row.toJSON(); + console.log( + `VenueId: ${json.VenueId}, VenueName: ${json.VenueName},` + + ` PopularityScore: ${json.PopularityScore}` + ); + }); + } catch (err) { + console.error('ERROR:', err); + } finally { + // Close the database when finished. + database.close(); + } + // [END spanner_query_with_float_parameter] +} + +async function queryWithInt(instanceId, databaseId, projectId) { + // [START spanner_query_with_int_parameter] + // Imports the Google Cloud client library. + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance and database. 
+ const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const fieldType = { + type: 'int64', + }; + + const exampleInt = 3000; + + const query = { + sql: `SELECT VenueId, VenueName, Capacity FROM Venues + WHERE Capacity >= @capacity`, + params: { + capacity: exampleInt, + }, + types: { + capacity: fieldType, + }, + }; + + // Queries rows from the Venues table. + try { + const [rows] = await database.run(query); + + rows.forEach(row => { + const json = row.toJSON(); + console.log( + `VenueId: ${json.VenueId}, VenueName: ${json.VenueName},` + + ` Capacity: ${json.Capacity}` + ); + }); + } catch (err) { + console.error('ERROR:', err); + } finally { + // Close the database when finished. + database.close(); + } + // [END spanner_query_with_int_parameter] +} + +async function queryWithString(instanceId, databaseId, projectId) { + // [START spanner_query_with_string_parameter] + // Imports the Google Cloud client library. + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance and database. + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const fieldType = { + type: 'string', + }; + + const exampleString = 'Venue 42'; + + const query = { + sql: `SELECT VenueId, VenueName FROM Venues + WHERE VenueName = @venueName`, + params: { + venueName: exampleString, + }, + types: { + venueName: fieldType, + }, + }; + + // Queries rows from the Venues table. + try { + const [rows] = await database.run(query); + + rows.forEach(row => { + const json = row.toJSON(); + console.log(`VenueId: ${json.VenueId}, VenueName: ${json.VenueName}`); + }); + } catch (err) { + console.error('ERROR:', err); + } finally { + // Close the database when finished. + database.close(); + } + // [END spanner_query_with_string_parameter] +} + +async function queryWithTimestamp(instanceId, databaseId, projectId) { + // [START spanner_query_with_timestamp_parameter] + // Imports the Google Cloud client library. + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + + // Creates a client. + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance and database. + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const fieldType = { + type: 'timestamp', + }; + + const exampleTimestamp = new Date().toISOString(); + + const query = { + sql: `SELECT VenueId, VenueName, LastUpdateTime FROM Venues + WHERE LastUpdateTime < @lastUpdateTime`, + params: { + lastUpdateTime: exampleTimestamp, + }, + types: { + lastUpdateTime: fieldType, + }, + }; + + // Queries rows from the Venues table. 
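+ // Note: LastUpdateTime is populated with the commit timestamp, so comparing it against
+ // the current time returns rows committed before this query runs.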
+ try {
+ const [rows] = await database.run(query);
+
+ rows.forEach(row => {
+ const json = row.toJSON();
+ console.log(
+ `VenueId: ${json.VenueId}, VenueName: ${json.VenueName},` +
+ ` LastUpdateTime: ${json.LastUpdateTime}`
+ );
+ });
+ } catch (err) {
+ console.error('ERROR:', err);
+ } finally {
+ // Close the database when finished.
+ database.close();
+ }
+ // [END spanner_query_with_timestamp_parameter]
+}
+
+const {addNumericColumn} = require('./numeric-add-column');
+const {updateWithNumericData} = require('../numeric-update-data');
+const {queryWithNumericParameter} = require('../numeric-query-parameter');
+
+const {addJsonColumn} = require('./json-add-column');
+const {updateWithJsonData} = require('../json-update-data');
+const {queryWithJsonParameter} = require('../json-query-parameter');
+
+require('yargs')
+ .demand(1)
+ .command(
+ 'createVenuesTable <instanceName> <databaseName> <projectId>',
+ 'Creates sample "Venues" table containing example datatype columns in a Cloud Spanner database.',
+ {},
+ opts =>
+ createVenuesTable(opts.instanceName, opts.databaseName, opts.projectId)
+ )
+ .command(
+ 'insertData <instanceName> <databaseName> <projectId>',
+ 'Inserts new rows of data into a sample "Venues" Cloud Spanner table.',
+ {},
+ opts => insertData(opts.instanceName, opts.databaseName, opts.projectId)
+ )
+ .command(
+ 'queryWithArray <instanceName> <databaseName> <projectId>',
+ "Query data from the sample 'Venues' table with an ARRAY datatype.",
+ {},
+ opts => queryWithArray(opts.instanceName, opts.databaseName, opts.projectId)
+ )
+ .command(
+ 'queryWithBool <instanceName> <databaseName> <projectId>',
+ "Query data from the sample 'Venues' table with a BOOL datatype.",
+ {},
+ opts => queryWithBool(opts.instanceName, opts.databaseName, opts.projectId)
+ )
+ .command(
+ 'queryWithBytes <instanceName> <databaseName> <projectId>',
+ "Query data from the sample 'Venues' table with a BYTES datatype.",
+ {},
+ opts => queryWithBytes(opts.instanceName, opts.databaseName, opts.projectId)
+ )
+ .command(
+ 'queryWithDate <instanceName> <databaseName> <projectId>',
+ "Query data from the sample 'Venues' table with a DATE datatype.",
+ {},
+ opts => queryWithDate(opts.instanceName, opts.databaseName, opts.projectId)
+ )
+ .command(
+ 'queryWithFloat <instanceName> <databaseName> <projectId>',
+ "Query data from the sample 'Venues' table with a FLOAT64 datatype.",
+ {},
+ opts => queryWithFloat(opts.instanceName, opts.databaseName, opts.projectId)
+ )
+ .command(
+ 'queryWithInt <instanceName> <databaseName> <projectId>',
+ "Query data from the sample 'Venues' table with an INT64 datatype.",
+ {},
+ opts => queryWithInt(opts.instanceName, opts.databaseName, opts.projectId)
+ )
+ .command(
+ 'queryWithString <instanceName> <databaseName> <projectId>',
+ "Query data from the sample 'Venues' table with a STRING datatype.",
+ {},
+ opts =>
+ queryWithString(opts.instanceName, opts.databaseName, opts.projectId)
+ )
+ .command(
+ 'queryWithTimestamp <instanceName> <databaseName> <projectId>',
+ "Query data from the sample 'Venues' table with a TIMESTAMP datatype.",
+ {},
+ opts =>
+ queryWithTimestamp(opts.instanceName, opts.databaseName, opts.projectId)
+ )
+ .command(
+ 'addNumericColumn <instanceName> <databaseName> <projectId>',
+ 'Adds a "Revenue" column to sample "Venues" table in a Cloud Spanner database.',
+ {},
+ opts =>
+ addNumericColumn(opts.instanceName, opts.databaseName, opts.projectId)
+ )
+ .command(
+ 'updateWithNumericData <instanceName> <databaseName> <projectId>',
+ 'Updates rows to include "Revenue" in sample "Venues" Cloud Spanner table.',
+ {},
+ opts =>
+ updateWithNumericData(
+ opts.instanceName,
+ opts.databaseName,
+ opts.projectId
+ )
+ )
+ .command(
+ 'queryWithNumericParameter <instanceName> <databaseName> <projectId>',
+ "Query data from the sample 'Venues' table with a NUMERIC datatype.",
+ {},
+ opts =>
+ queryWithNumericParameter(
+ opts.instanceName,
+ opts.databaseName,
+ opts.projectId
+ )
+ )
+ .command(
+ 'addJsonColumn <instanceName> <databaseName> <projectId>',
+ 'Adds a "VenueDetails" column to sample "Venues" table in a Cloud Spanner database.',
"Venues" table in a Cloud Spanner database.', + {}, + opts => addJsonColumn(opts.instanceName, opts.databaseName, opts.projectId) + ) + .command( + 'updateWithJsonData ', + 'Updates rows to include "VenueDetails" in sample "Venues" Cloud Spanner table.', + {}, + opts => + updateWithJsonData(opts.instanceName, opts.databaseName, opts.projectId) + ) + .command( + 'queryWithJsonParameter ', + "Query data from the sample 'Venues' table with a JSON datatype.", + {}, + opts => + queryWithJsonParameter( + opts.instanceName, + opts.databaseName, + opts.projectId + ) + ) + .example( + 'node $0 createVenuesTable "my-instance" "my-database" "my-project-id"' + ) + .example('node $0 insertData "my-instance" "my-database" "my-project-id"') + .example('node $0 queryWithArray "my-instance" "my-database" "my-project-id"') + .example('node $0 queryWithBool "my-instance" "my-database" "my-project-id"') + .example('node $0 queryWithBytes "my-instance" "my-database" "my-project-id"') + .example('node $0 queryWithDate "my-instance" "my-database" "my-project-id"') + .example('node $0 queryWithFloat "my-instance" "my-database" "my-project-id"') + .example('node $0 queryWithInt "my-instance" "my-database" "my-project-id"') + .example( + 'node $0 queryWithString "my-instance" "my-database" "my-project-id"' + ) + .example( + 'node $0 queryWithTimestamp "my-instance" "my-database" "my-project-id"' + ) + .example( + 'node $0 addNumericColumn "my-instance" "my-database" "my-project-id"' + ) + .example( + 'node $0 updateWithNumericData "my-instance" "my-database" "my-project-id"' + ) + .example( + 'node $0 queryWithNumericParameter "my-instance" "my-database" "my-project-id"' + ) + .wrap(120) + .recommendCommands() + .epilogue('For more information, see https://cloud.google.com/spanner/docs') + .strict() + .help().argv; diff --git a/samples/archived/empty b/samples/archived/empty deleted file mode 100644 index 679e43b31..000000000 --- a/samples/archived/empty +++ /dev/null @@ -1 +0,0 @@ -DELETE THIS FILE WHEN MORE FILES ARE ADDED UNDER THIS FOLDER \ No newline at end of file diff --git a/samples/archived/enable-fine-grained-access.js b/samples/archived/enable-fine-grained-access.js new file mode 100644 index 000000000..fd7f38efd --- /dev/null +++ b/samples/archived/enable-fine-grained-access.js @@ -0,0 +1,80 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// sample-metadata: +// title: Enable fine grained access control +// usage: node enable-fine-grained-access.js + +'use strict'; + +function main( + instanceId = 'my-instance', + databaseId = 'my-database', + projectId = 'my-project-id', + iamMember = 'user:alice@example.com', + databaseRole = 'parent', + title = 'condition title' +) { + // [START spanner_enable_fine_grained_access] + /** + * TODO(developer): Uncomment these variables before running the sample. 
+ */ + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + // const projectId = 'my-project-id'; + // iamMember = 'user:alice@example.com'; + // databaseRole = 'parent'; + // title = 'condition title'; + // Imports the Google Cloud Spanner client library + const {Spanner} = require('@google-cloud/spanner'); + + // Instantiates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + async function enableFineGrainedAccess() { + // Gets a reference to a Cloud Spanner instance and database. + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const [policy] = await database.getIamPolicy({requestedPolicyVersion: 3}); + if (policy.version < 3) { + policy.version = 3; + } + + const newBinding = { + role: 'roles/spanner.fineGrainedAccessUser', + members: [`user:${iamMember}`], + condition: { + title: title, + expression: `resource.name.endsWith("/databaseRoles/${databaseRole}")`, + }, + }; + policy.bindings.push(newBinding); + await database.setIamPolicy({policy: policy}); + // Requested Policy Version is Optional. The maximum policy version that will be used to format the policy. + // Valid values are 0, 1, and 3. Requests specifying an invalid value will be rejected. + const newPolicy = await database.getIamPolicy({requestedPolicyVersion: 3}); + console.log(newPolicy); + } + enableFineGrainedAccess(); + // [END spanner_enable_fine_grained_access] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/get-database-roles.js b/samples/archived/get-database-roles.js new file mode 100644 index 000000000..99a4451e2 --- /dev/null +++ b/samples/archived/get-database-roles.js @@ -0,0 +1,61 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// sample-metadata: +// title: List database roles +// usage: node get-database-roles.js + +'use strict'; + +function main( + instanceId = 'my-instance', + databaseId = 'my-database', + projectId = 'my-project-id' +) { + // [START spanner_list_database_roles] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + // const projectId = 'my-project-id'; + // Imports the Google Cloud Spanner client library + const {Spanner} = require('@google-cloud/spanner'); + + // Instantiates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + async function getDatabaseRoles() { + // Gets a reference to a Cloud Spanner instance and database. 
+ const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + // Fetching database roles + const [databaseRoles] = await database.getDatabaseRoles(); + console.log(`Roles for Database: ${database.formattedName_}`); + databaseRoles.forEach(role => { + console.log(`Role: ${role.name}`); + }); + } + getDatabaseRoles(); + // [END spanner_list_database_roles] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/get-instance-config.js b/samples/archived/get-instance-config.js new file mode 100644 index 000000000..b769e41c9 --- /dev/null +++ b/samples/archived/get-instance-config.js @@ -0,0 +1,56 @@ +/** + * Copyright 2021 Google LLC + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// sample-metadata: +// title: Gets the instance config metadata for the configuration nam6 +// usage: node get-instance-config.js + +'use strict'; + +function main(projectId) { + // [START spanner_get_instance_config] + /** + * TODO(developer): Uncomment the following line before running the sample. + */ + // const projectId = 'my-project-id'; + + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + async function getInstanceConfig() { + // Get the instance config for the multi-region North America 6 (NAM6). + // See https://cloud.google.com/spanner/docs/instance-configurations#configuration for a list of all available + // configurations. + const [instanceConfig] = await spanner.getInstanceConfig('nam6'); + console.log( + `Available leader options for instance config ${instanceConfig.name} ('${ + instanceConfig.displayName + }'): + ${instanceConfig.leaderOptions.join()}` + ); + } + getInstanceConfig(); + // [END spanner_get_instance_config] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/index-create-storing.js b/samples/archived/index-create-storing.js new file mode 100644 index 000000000..72924b874 --- /dev/null +++ b/samples/archived/index-create-storing.js @@ -0,0 +1,73 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// sample-metadata: +// title: Creates a new value-storing index +// usage: node createStoringIndex + +'use strict'; + +function main( + instanceId = 'my-instance', + databaseId = 'my-database', + projectId = 'my-project-id' +) { + // [START spanner_create_storing_index] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + // const projectId = 'my-project-id'; + + // Imports the Google Cloud Spanner client library + const {Spanner} = require('@google-cloud/spanner'); + + // Instantiates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + async function createStoringIndex() { + // Gets a reference to a Cloud Spanner instance and database + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const request = [ + 'CREATE INDEX AlbumsByAlbumTitle2 ON Albums(AlbumTitle) STORING (MarketingBudget)', + ]; + + // Creates a new index in the database + try { + const [operation] = await database.updateSchema(request); + + console.log('Waiting for operation to complete...'); + await operation.promise(); + + console.log('Added the AlbumsByAlbumTitle2 index.'); + } catch (err) { + console.error('ERROR:', err); + } finally { + // Close the database when finished. + database.close(); + } + } + createStoringIndex(); + // [END spanner_create_storing_index] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/index-create.js b/samples/archived/index-create.js new file mode 100644 index 000000000..b220e8991 --- /dev/null +++ b/samples/archived/index-create.js @@ -0,0 +1,71 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// sample-metadata: +// title: Creates a new index +// usage: node createIndex + +'use strict'; + +function main( + instanceId = 'my-instance', + databaseId = 'my-database', + projectId = 'my-project-id' +) { + // [START spanner_create_index] + /** + * TODO(developer): Uncomment these variables before running the sample. 
+ */ + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + // const projectId = 'my-project-id'; + + // Imports the Google Cloud Spanner client library + const {Spanner} = require('@google-cloud/spanner'); + + // Instantiates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + async function createIndex() { + // Gets a reference to a Cloud Spanner instance and database + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const request = ['CREATE INDEX AlbumsByAlbumTitle ON Albums(AlbumTitle)']; + + // Creates a new index in the database + try { + const [operation] = await database.updateSchema(request); + + console.log('Waiting for operation to complete...'); + await operation.promise(); + + console.log('Added the AlbumsByAlbumTitle index.'); + } catch (err) { + console.error('ERROR:', err); + } finally { + // Close the database when finished. + database.close(); + } + } + createIndex(); + // [END spanner_create_index] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/instance-config-create.js b/samples/archived/instance-config-create.js new file mode 100644 index 000000000..7ae6cccff --- /dev/null +++ b/samples/archived/instance-config-create.js @@ -0,0 +1,79 @@ +/** + * Copyright 2022 Google LLC + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// sample-metadata: +// title: Creates a user-managed instance configuration. +// usage: node instance-config-create + +'use strict'; + +function main( + instanceConfigId = 'custom-my-instance-config', + baseInstanceConfigId = 'my-base-instance-config', + projectId = 'my-project-id' +) { + // [START spanner_create_instance_config] + + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // const instanceConfigId = 'custom-my-instance-config-id' + // const baseInstanceConfigId = 'my-base-instance-config-id'; + // const projectId = 'my-project-id'; + + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + async function createInstanceConfig() { + // Creates a new instance config + const instanceConfig = spanner.instanceConfig(instanceConfigId); + try { + const [baseInstanceConfig] = + await spanner.getInstanceConfig(baseInstanceConfigId); + console.log(`Creating instance config ${instanceConfig.formattedName_}.`); + const [, operation] = await instanceConfig.create({ + displayName: instanceConfigId, + baseConfig: baseInstanceConfig.name, + replicas: baseInstanceConfig.replicas.concat( + baseInstanceConfig.optionalReplicas[0] + ), + }); + console.log( + `Waiting for create operation for ${instanceConfig.id} to complete...` + ); + await operation.promise(); + console.log(`Created instance config ${instanceConfigId}.`); + } catch (err) { + console.error( + 'ERROR: Creating instance config ', + instanceConfigId, + ' failed with error message ', + err + ); + } + } + createInstanceConfig(); + // [END spanner_create_instance_config] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/instance-config-delete.js b/samples/archived/instance-config-delete.js new file mode 100644 index 000000000..d28bec969 --- /dev/null +++ b/samples/archived/instance-config-delete.js @@ -0,0 +1,76 @@ +/** + * Copyright 2022 Google LLC + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// sample-metadata: +// title: Deletes a user-managed instance configuration. +// usage: node instance-config-delete + +'use strict'; + +function main( + instanceConfigId = 'custom-my-instance-config', + projectId = 'my-project-id' +) { + // [START spanner_delete_instance_config] + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const instanceConfigId = 'custom-my-instance-config-id'; + // const projectId = 'my-project-id'; + + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + async function deleteInstanceConfig() { + // Deletes an instance config. + const instanceConfig = spanner.instanceConfig(instanceConfigId); + try { + // Delete the instance config. 
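+ // Note: only user-managed (custom) instance configurations can be deleted,
+ // and the delete fails while any instance still uses the configuration.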
+ console.log(`Deleting ${instanceConfig.id}...\n`); + await instanceConfig.delete(); + // Verify that the instance config no longer exists + const exists = await instanceConfig.exists(); + if (exists) { + console.error( + 'Error: Instance config ', + instanceConfigId, + ' still exists' + ); + } else { + console.log(`Deleted instance config ${instanceConfigId}.\n`); + } + } catch (err) { + console.error( + 'ERROR: Deleting instance config ', + instanceConfigId, + ' failed with error message ', + err + ); + } + } + deleteInstanceConfig(); + // [END spanner_delete_instance_config] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/instance-config-get-operations.js b/samples/archived/instance-config-get-operations.js new file mode 100644 index 000000000..b62e0a439 --- /dev/null +++ b/samples/archived/instance-config-get-operations.js @@ -0,0 +1,76 @@ +/** + * Copyright 2022 Google LLC + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// sample-metadata: +// title: Lists the instance configuration operations. +// usage: node instance-config-get-operations + +'use strict'; + +function main(projectId = 'my-project-id') { + // [START spanner_list_instance_config_operations] + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + + // Imports the Google Cloud client library + const {Spanner, protos} = require('@google-cloud/spanner'); + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + async function getInstanceConfigOperations() { + // Lists the instance config operations. + try { + console.log( + `Getting list of instance config operations on project ${projectId}...\n` + ); + const [instanceConfigOperations] = + await spanner.getInstanceConfigOperations({ + filter: + '(metadata.@type=type.googleapis.com/google.spanner.admin.instance.v1.CreateInstanceConfigMetadata)', + }); + console.log( + `Available instance config operations for project ${projectId}:` + ); + instanceConfigOperations.forEach(instanceConfigOperation => { + const metadata = instanceConfigOperation.metadata; + const instanceConfig = + protos.google.spanner.admin.instance.v1.CreateInstanceConfigMetadata.decode( + instanceConfigOperation.metadata.value + ).instanceConfig; + console.log( + `Instance config operation for ${instanceConfig.name} of type` + + ` ${metadata.type_url} has status ${ + instanceConfigOperation.done ? 
'done' : 'running' + }.` + ); + }); + } catch (err) { + console.error('ERROR:', err); + } + } + getInstanceConfigOperations(); + // [END spanner_list_instance_config_operations] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/instance-config-update.js b/samples/archived/instance-config-update.js new file mode 100644 index 000000000..acb7dc237 --- /dev/null +++ b/samples/archived/instance-config-update.js @@ -0,0 +1,76 @@ +/** + * Copyright 2022 Google LLC + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// sample-metadata: +// title: Updates a user-managed instance configuration. +// usage: node instance-config-update + +'use strict'; + +function main( + instanceConfigId = 'custom-my-instance-config', + projectId = 'my-project-id' +) { + // [START spanner_update_instance_config] + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const instanceConfigId = 'custom-my-instance-config-id'; + // const projectId = 'my-project-id'; + + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + async function updateInstanceConfig() { + // Updates an instance config + const instanceConfig = spanner.instanceConfig(instanceConfigId); + try { + console.log(`Updating instance config ${instanceConfig.formattedName_}.`); + const [operation] = await instanceConfig.setMetadata({ + instanceConfig: { + displayName: 'updated custom instance config', + labels: { + updated: 'true', + }, + }, + }); + console.log( + `Waiting for update operation for ${instanceConfig.id} to complete...` + ); + await operation.promise(); + console.log(`Updated instance config ${instanceConfigId}.`); + } catch (err) { + console.error( + 'ERROR: Updating instance config ', + instanceConfigId, + ' failed with error message ', + err + ); + } + } + updateInstanceConfig(); + // [END spanner_update_instance_config] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/instance-with-processing-units.js b/samples/archived/instance-with-processing-units.js new file mode 100644 index 000000000..8d47e87d3 --- /dev/null +++ b/samples/archived/instance-with-processing-units.js @@ -0,0 +1,67 @@ +/** + * Copyright 2021 Google LLC + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +async function createInstanceWithProcessingUnits(instanceId, projectId) { + // [START spanner_create_instance_with_processing_units] + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + const instance = spanner.instance(instanceId); + + // Creates a new instance + try { + console.log(`Creating instance ${instance.formattedName_}.`); + const [, operation] = await instance.create({ + config: 'regional-us-central1', + processingUnits: 500, + displayName: 'This is a display name.', + labels: { + ['cloud_spanner_samples']: 'true', + }, + }); + + console.log(`Waiting for operation on ${instance.id} to complete...`); + await operation.promise(); + + console.log(`Created instance ${instanceId}.`); + + const [metadata] = await instance.getMetadata({ + fieldNames: ['processingUnits'], + }); + console.log( + `Instance ${instanceId} has ${metadata.processingUnits} ` + + 'processing units.' + ); + } catch (err) { + console.error('ERROR:', err); + } + // [END spanner_create_instance_with_processing_units] +} + +module.exports.createInstanceWithProcessingUnits = + createInstanceWithProcessingUnits; diff --git a/samples/archived/instance.js b/samples/archived/instance.js new file mode 100644 index 000000000..cd9281488 --- /dev/null +++ b/samples/archived/instance.js @@ -0,0 +1,85 @@ +/** + * Copyright 2020 Google LLC + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +'use strict'; + +async function createInstance(instanceId, projectId) { + // [START spanner_create_instance] + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */
+ // const projectId = 'my-project-id';
+ // const instanceId = 'my-instance';
+
+ // Creates a client
+ const spanner = new Spanner({
+ projectId: projectId,
+ });
+
+ const instance = spanner.instance(instanceId);
+
+ // Creates a new instance
+ try {
+ console.log(`Creating instance ${instance.formattedName_}.`);
+ const [, operation] = await instance.create({
+ config: 'regional-us-west1',
+ nodes: 1,
+ displayName: 'This is a display name.',
+ labels: {
+ ['cloud_spanner_samples']: 'true',
+ created: Math.round(Date.now() / 1000).toString(), // current time
+ },
+ });
+
+ console.log(`Waiting for operation on ${instance.id} to complete...`);
+ await operation.promise();
+
+ console.log(`Created instance ${instanceId}.`);
+ } catch (err) {
+ console.error('ERROR:', err);
+ }
+ // [END spanner_create_instance]
+}
+
+const {
+ createInstanceWithProcessingUnits,
+} = require('./instance-with-processing-units');
+
+require('yargs')
+ .demand(1)
+ .command(
+ 'createInstance <instanceName> <projectId>',
+ 'Creates an example Cloud Spanner instance.',
+ {},
+ opts => createInstance(opts.instanceName, opts.projectId)
+ )
+ .example('node $0 createInstance "my-instance" "my-project-id"')
+ .command(
+ 'createInstanceWithProcessingUnits <instanceName> <projectId>',
+ 'Creates an example Cloud Spanner instance with processing units.',
+ {},
+ opts => createInstanceWithProcessingUnits(opts.instanceName, opts.projectId)
+ )
+ .example(
+ 'node $0 createInstanceWithProcessingUnits "my-instance" "my-project-id"'
+ )
+ .wrap(120)
+ .recommendCommands()
+ .epilogue('For more information, see https://cloud.google.com/spanner/docs')
+ .strict()
+ .help().argv;
diff --git a/samples/archived/json-add-column.js b/samples/archived/json-add-column.js
new file mode 100644
index 000000000..0eed31d0f
--- /dev/null
+++ b/samples/archived/json-add-column.js
@@ -0,0 +1,53 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict';
+
+async function addJsonColumn(instanceId, databaseId, projectId) {
+ // [START spanner_add_json_column]
+ // Imports the Google Cloud client library.
+ const {Spanner} = require('@google-cloud/spanner');
+
+ /**
+ * TODO(developer): Uncomment the following lines before running the sample.
+ */
+ // const projectId = 'my-project-id';
+ // const instanceId = 'my-instance';
+ // const databaseId = 'my-database';
+
+ // Creates a client
+ const spanner = new Spanner({
+ projectId: projectId,
+ });
+
+ // Gets a reference to a Cloud Spanner instance.
+ const instance = spanner.instance(instanceId);
+ const database = instance.database(databaseId);
+
+ const request = ['ALTER TABLE Venues ADD COLUMN VenueDetails JSON'];
+
+ // Alter existing table to add a column.
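+ // Note: the JSON column type applies to GoogleSQL-dialect databases;
+ // PostgreSQL-dialect databases use JSONB instead.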
+ const [operation] = await database.updateSchema(request); + + console.log(`Waiting for operation on ${databaseId} to complete...`); + + await operation.promise(); + + console.log( + `Added VenueDetails column to Venues table in database ${databaseId}.` + ); + // [END spanner_add_json_column] +} + +module.exports.addJsonColumn = addJsonColumn; diff --git a/samples/archived/list-databases.js b/samples/archived/list-databases.js new file mode 100644 index 000000000..642600ff6 --- /dev/null +++ b/samples/archived/list-databases.js @@ -0,0 +1,58 @@ +/** + * Copyright 2021 Google LLC + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// sample-metadata: +// title: Lists all databases on the selected instance +// usage: node list-databases.js + +'use strict'; + +function main(instanceId, projectId) { + // [START spanner_list_databases] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance-id'; + + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + // Gets a reference to a Cloud Spanner instance + const instance = spanner.instance(instanceId); + + async function listDatabases() { + // Lists all databases on the instance. + const [databases] = await instance.getDatabases(); + console.log(`Databases for projects/${projectId}/instances/${instanceId}:`); + databases.forEach(database => { + const defaultLeader = database.metadata.defaultLeader + ? `(default leader = ${database.metadata.defaultLeader})` + : ''; + console.log(`\t${database.id} ${defaultLeader}`); + }); + } + listDatabases(); + // [END spanner_list_databases] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/list-instance-configs.js b/samples/archived/list-instance-configs.js new file mode 100644 index 000000000..7171acf80 --- /dev/null +++ b/samples/archived/list-instance-configs.js @@ -0,0 +1,59 @@ +/** + * Copyright 2021 Google LLC + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// sample-metadata: +// title: Lists all the available instance configs for the selected project. 
+// usage: node list-instance-configs.js + +'use strict'; + +function main(projectId) { + // [START spanner_list_instance_configs] + /** + * TODO(developer): Uncomment the following line before running the sample. + */ + // const projectId = 'my-project-id'; + + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + async function listInstanceConfigs() { + // Lists all available instance configurations in the project. + // See https://cloud.google.com/spanner/docs/instance-configurations#configuration for a list of all available + // configurations. + const [instanceConfigs] = await spanner.getInstanceConfigs(); + console.log(`Available instance configs for project ${projectId}:`); + instanceConfigs.forEach(instanceConfig => { + console.log( + `Available leader options for instance config ${ + instanceConfig.name + } ('${instanceConfig.displayName}'): + ${instanceConfig.leaderOptions.join()}` + ); + }); + } + listInstanceConfigs(); + // [END spanner_list_instance_configs] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/numeric-add-column.js b/samples/archived/numeric-add-column.js new file mode 100644 index 000000000..4ab81a0ea --- /dev/null +++ b/samples/archived/numeric-add-column.js @@ -0,0 +1,53 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +async function addNumericColumn(instanceId, databaseId, projectId) { + // [START spanner_add_numeric_column] + // Imports the Google Cloud client library. + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance. + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const request = ['ALTER TABLE Venues ADD COLUMN Revenue NUMERIC']; + + // Alter existing table to add a column. 
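+ // Note: NUMERIC in GoogleSQL-dialect databases is an exact numeric type
+ // with precision 38 and scale 9.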
+ const [operation] = await database.updateSchema(request); + + console.log(`Waiting for operation on ${databaseId} to complete...`); + + await operation.promise(); + + console.log( + `Added Revenue column to Venues table in database ${databaseId}.` + ); + // [END spanner_add_numeric_column] +} + +module.exports.addNumericColumn = addNumericColumn; diff --git a/samples/archived/pg-add-column.js b/samples/archived/pg-add-column.js new file mode 100644 index 000000000..a64ef4872 --- /dev/null +++ b/samples/archived/pg-add-column.js @@ -0,0 +1,67 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// sample-metadata: +// title: Adds a column to an existing table in a Spanner PostgreSQL database. +// usage: node pg-add-column.js + +'use strict'; + +function main( + instanceId = 'my-instance', + databaseId = 'my-database', + projectId = 'my-project-id' +) { + // [START spanner_postgresql_add_column] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + // const projectId = 'my-project-id'; + + // Imports the Google Cloud Spanner client library + const {Spanner} = require('@google-cloud/spanner'); + + // Instantiates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + async function pgAddColumn() { + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const request = ['ALTER TABLE Albums ADD COLUMN MarketingBudget BIGINT']; + + // Alter existing table to add a column. + const [operation] = await database.updateSchema(request); + + console.log(`Waiting for operation on ${databaseId} to complete...`); + + await operation.promise(); + + console.log( + `Added MarketingBudget column to Albums table in database ${databaseId}.` + ); + } + pgAddColumn(); + // [END spanner_postgresql_add_column] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/pg-database-create.js b/samples/archived/pg-database-create.js new file mode 100644 index 000000000..4eb5abc7d --- /dev/null +++ b/samples/archived/pg-database-create.js @@ -0,0 +1,95 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// sample-metadata: +// title: Creates a PostgreSQL Database. 
+// usage: node pg-database-create.js + +'use strict'; + +function main( + instanceId = 'my-instance', + databaseId = 'my-database', + projectId = 'my-project-id' +) { + // [START spanner_postgresql_create_database] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + // const projectId = 'my-project-id'; + + // Imports the Google Cloud Spanner client library + const {Spanner} = require('@google-cloud/spanner'); + + // Instantiates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + async function createPgDatabase() { + // Gets a reference to a Cloud Spanner instance + const instance = spanner.instance(instanceId); + + // Set Dialect as PostgreSQL + const request = { + databaseDialect: Spanner.POSTGRESQL, + }; + + // Creates a PostgreSQL database. PostgreSQL create requests may not contain any additional + // DDL statements. We need to execute these separately after the database has been created. + const [database, operationCreate] = await instance.createDatabase( + databaseId, + request + ); + + console.log(`Waiting for operation on ${database.id} to complete...`); + await operationCreate.promise(); + await database.getMetadata(); + console.log( + `Created database ${databaseId} on instance ${instanceId} with dialect ${database.metadata.databaseDialect}.` + ); + + // Create a couple of tables using a separate request. We must use PostgreSQL style DDL as the + // database has been created with the PostgreSQL dialect. + const statements = [ + `CREATE TABLE Singers + (SingerId bigint NOT NULL, + FirstName varchar(1024), + LastName varchar(1024), + SingerInfo bytea, + FullName character varying(2048) GENERATED ALWAYS AS (FirstName || ' ' || LastName) STORED, + PRIMARY KEY (SingerId) + ); + CREATE TABLE Albums + (AlbumId bigint NOT NULL, + SingerId bigint NOT NULL REFERENCES Singers (SingerId), + AlbumTitle text, + PRIMARY KEY (AlbumId) + );`, + ]; + const [operationUpdateDDL] = await database.updateSchema(statements); + await operationUpdateDDL.promise(); + console.log('Updated schema'); + } + createPgDatabase(); + // [END spanner_postgresql_create_database] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/pg-index-create-storing.js b/samples/archived/pg-index-create-storing.js new file mode 100644 index 000000000..878745ab3 --- /dev/null +++ b/samples/archived/pg-index-create-storing.js @@ -0,0 +1,74 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// sample-metadata: +// title: Creates a new storing index in a Spanner PostgreSQL database. 
+// usage: node pg-index-create-storing.js + +'use strict'; + +function main( + instanceId = 'my-instance', + databaseId = 'my-database', + projectId = 'my-project-id' +) { + // [START spanner_postgresql_create_storing_index] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + // const projectId = 'my-project-id'; + + // Imports the Google Cloud Spanner client library + const {Spanner} = require('@google-cloud/spanner'); + + // Instantiates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + async function pgCreateStoringIndex() { + // Gets a reference to a Cloud Spanner instance and database + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const request = [ + 'CREATE INDEX AlbumsByAlbumTitle ON Albums(AlbumTitle) INCLUDE(MarketingBudget)', + ]; + + // Creates a new index in the database + try { + const [operation] = await database.updateSchema(request); + + console.log('Waiting for operation to complete...'); + await operation.promise(); + + console.log('Added the AlbumsByAlbumTitle index.'); + } catch (err) { + console.error('ERROR:', err); + } finally { + // Close the database when finished. + database.close(); + } + } + pgCreateStoringIndex(); + // [END spanner_postgresql_create_storing_index] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/pg-interleaving.js b/samples/archived/pg-interleaving.js new file mode 100644 index 000000000..c61a594b6 --- /dev/null +++ b/samples/archived/pg-interleaving.js @@ -0,0 +1,80 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// sample-metadata: +// title: Created interleaved table hierarchy using PostgreSQL dialect. +// usage: node pg-interleaving.js + +'use strict'; + +function main( + instanceId = 'my-instance', + databaseId = 'my-database', + projectId = 'my-project-id' +) { + // [START spanner_postgresql_interleaved_table] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + // const projectId = 'my-project-id'; + // Imports the Google Cloud Spanner client library + const {Spanner} = require('@google-cloud/spanner'); + + // Instantiates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + async function pgInterleaving() { + // Gets a reference to a Cloud Spanner instance and database. 
+ const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const statements = [ + `CREATE TABLE Author + (AuthorId bigint NOT NULL, + FirstName varchar(1024), + LastName varchar(1024), + Rating double precision, + PRIMARY KEY (AuthorId) + ); + CREATE TABLE Book + (AuthorId bigint NOT NULL, + BookId bigint NOT NULL, + BookTitle text, + PRIMARY KEY (AuthorId, BookId) + ) INTERLEAVE IN PARENT Author ON DELETE CASCADE;`, + ]; + + // Updates schema by adding new tables. + const [operation] = await database.updateSchema(statements); + + console.log(`Waiting for operation on ${databaseId} to complete...`); + await operation.promise(); + + console.log( + `Created an interleaved table hierarchy in database ${databaseId} using PostgreSQL dialect.` + ); + } + pgInterleaving(); + // [END spanner_postgresql_interleaved_table] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/pg-jsonb-add-column.js b/samples/archived/pg-jsonb-add-column.js new file mode 100644 index 000000000..8b7ea79b7 --- /dev/null +++ b/samples/archived/pg-jsonb-add-column.js @@ -0,0 +1,65 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// sample-metadata: +// title: Showcase how add a jsonb column in a PostgreSQL table. +// usage: node pg-jsonb-add-column.js + +'use strict'; + +function main( + instanceId = 'my-instance', + databaseId = 'my-database', + projectId = 'my-project-id' +) { + // [START spanner_postgresql_jsonb_add_column] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + // const projectId = 'my-project-id'; + + // Imports the Google Cloud Spanner client library + const {Spanner} = require('@google-cloud/spanner'); + + // Instantiates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + async function pgJsonbAddColumn() { + // Gets a reference to a Cloud Spanner instance and database. + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const request = ['ALTER TABLE Venues ADD COLUMN VenueDetails JSONB']; + + // Updates schema by adding a new table. 
+ const [operation] = await database.updateSchema(request); + console.log(`Waiting for operation on ${databaseId} to complete...`); + await operation.promise(); + console.log( + `Added jsonb column to table venues to database ${databaseId}.` + ); + } + pgJsonbAddColumn(); + // [END spanner_postgresql_jsonb_add_column] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/pg-sequence-alter.js b/samples/archived/pg-sequence-alter.js new file mode 100644 index 000000000..a0c41e9d5 --- /dev/null +++ b/samples/archived/pg-sequence-alter.js @@ -0,0 +1,95 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// sample-metadata: +// title: Alters a sequence in a PostgreSQL database. +// usage: node pg-sequence-alter.js + +'use strict'; + +async function main(instanceId, databaseId, projectId) { + // [START spanner_postgresql_alter_sequence] + // Imports the Google Cloud client library. + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + async function alterSequence(instanceId, databaseId) { + // Gets a reference to a Cloud Spanner instance and database + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const request = ['ALTER SEQUENCE Seq SKIP RANGE 1000 5000000']; + + try { + const [operation] = await database.updateSchema(request); + + console.log('Waiting for operation to complete...'); + await operation.promise(); + + console.log( + 'Altered Seq sequence to skip an inclusive range between 1000 and 5000000.' + ); + } catch (err) { + console.error('ERROR:', err); + } + database.runTransaction(async (err, transaction) => { + if (err) { + console.error(err); + return; + } + try { + const [rows, stats] = await transaction.run({ + sql: "INSERT INTO Customers (CustomerName) VALUES ('Lea'), ('Catalina'), ('Smith') RETURNING CustomerId", + }); + + rows.forEach(row => { + console.log( + `Inserted customer record with CustomerId: ${ + row.toJSON({wrapNumbers: true}).customerid.value + }` + ); + }); + + const rowCount = Math.floor(stats[stats.rowCount]); + console.log(`Number of customer records inserted is: ${rowCount}`); + + await transaction.commit(); + } catch (err) { + console.error('ERROR:', err); + } finally { + // Close the database when finished. 
+ await database.close(); + } + }); + } + await alterSequence(instanceId, databaseId); + // [END spanner_postgresql_alter_sequence] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/pg-sequence-create.js b/samples/archived/pg-sequence-create.js new file mode 100644 index 000000000..45030bf91 --- /dev/null +++ b/samples/archived/pg-sequence-create.js @@ -0,0 +1,99 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// sample-metadata: +// title: Creates sequence in PostgreSQL database. +// usage: node pg-sequence-create.js + +'use strict'; + +async function main(instanceId, databaseId, projectId) { + // [START spanner_postgresql_create_sequence] + // Imports the Google Cloud client library. + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + async function createSequence(instanceId, databaseId) { + // Gets a reference to a Cloud Spanner instance and database + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const request = [ + 'CREATE SEQUENCE Seq BIT_REVERSED_POSITIVE', + "CREATE TABLE Customers (CustomerId BIGINT DEFAULT nextval('Seq'), CustomerName character varying(1024), PRIMARY KEY (CustomerId))", + ]; + + // Creates a new table with sequence + try { + const [operation] = await database.updateSchema(request); + + console.log('Waiting for operation to complete...'); + await operation.promise(); + + console.log( + 'Created Seq sequence and Customers table, where the key column CustomerId uses the sequence as a default value' + ); + } catch (err) { + console.error('ERROR:', err); + } + database.runTransaction(async (err, transaction) => { + if (err) { + console.error(err); + return; + } + try { + const [rows, stats] = await transaction.run({ + sql: "INSERT INTO Customers (CustomerName) VALUES ('Alice'), ('David'), ('Marc') RETURNING CustomerId", + }); + + rows.forEach(row => { + console.log( + `Inserted customer record with CustomerId: ${ + row.toJSON({wrapNumbers: true}).customerid.value + }` + ); + }); + + const rowCount = Math.floor(stats[stats.rowCount]); + console.log(`Number of customer records inserted is: ${rowCount}`); + + await transaction.commit(); + } catch (err) { + console.error('ERROR:', err); + } finally { + // Close the database when finished. 
+ await database.close(); + } + }); + } + await createSequence(instanceId, databaseId); + // [END spanner_postgresql_create_sequence] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/pg-sequence-drop.js b/samples/archived/pg-sequence-drop.js new file mode 100644 index 000000000..624c80a66 --- /dev/null +++ b/samples/archived/pg-sequence-drop.js @@ -0,0 +1,73 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// sample-metadata: +// title: Drops a sequence in PostgreSQL database. +// usage: node pg-sequence-drop.js + +'use strict'; + +async function main(instanceId, databaseId, projectId) { + // [START spanner_drop_sequence] + // Imports the Google Cloud client library. + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + async function dropSequence(instanceId, databaseId) { + // Gets a reference to a Cloud Spanner instance and database + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const request = [ + 'ALTER TABLE Customers ALTER COLUMN CustomerId DROP DEFAULT', + 'DROP SEQUENCE Seq', + ]; + + // Drop sequence from DDL + try { + const [operation] = await database.updateSchema(request); + + console.log('Waiting for operation to complete...'); + await operation.promise(); + + console.log( + 'Altered Customers table to drop DEFAULT from CustomerId column and dropped the Seq sequence.' + ); + } catch (err) { + console.error('ERROR:', err); + } finally { + // Close the database when finished. + await database.close(); + } + } + await dropSequence(instanceId, databaseId); + // [END spanner_drop_sequence] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/schema.js b/samples/archived/schema.js new file mode 100644 index 000000000..8c746c2e6 --- /dev/null +++ b/samples/archived/schema.js @@ -0,0 +1,232 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +'use strict'; + +async function createDatabase(instanceId, databaseId, projectId) { + // [START spanner_create_database] + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance + const instance = spanner.instance(instanceId); + + // Note: Cloud Spanner interprets Node.js numbers as FLOAT64s, so they + // must be converted to strings before being inserted as INT64s + const request = { + schema: [ + `CREATE TABLE Singers ( + SingerId INT64 NOT NULL, + FirstName STRING(1024), + LastName STRING(1024), + SingerInfo BYTES(MAX), + FullName STRING(2048) AS (ARRAY_TO_STRING([FirstName, LastName], " ")) STORED, + ) PRIMARY KEY (SingerId)`, + `CREATE TABLE Albums ( + SingerId INT64 NOT NULL, + AlbumId INT64 NOT NULL, + AlbumTitle STRING(MAX) + ) PRIMARY KEY (SingerId, AlbumId), + INTERLEAVE IN PARENT Singers ON DELETE CASCADE`, + ], + }; + + // Creates a database + const [database, operation] = await instance.createDatabase( + databaseId, + request + ); + + console.log(`Waiting for operation on ${database.id} to complete...`); + await operation.promise(); + + console.log(`Created database ${databaseId} on instance ${instanceId}.`); + // [END spanner_create_database] +} + +async function addColumn(instanceId, databaseId, projectId) { + // [START spanner_add_column] + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance and database + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const request = ['ALTER TABLE Albums ADD COLUMN MarketingBudget INT64']; + + // Creates a new index in the database + try { + const [operation] = await database.updateSchema(request); + + console.log('Waiting for operation to complete...'); + await operation.promise(); + + console.log('Added the MarketingBudget column.'); + } catch (err) { + console.error('ERROR:', err); + } finally { + // Close the database when finished. + database.close(); + } + // [END spanner_add_column] +} + +async function queryDataWithNewColumn(instanceId, databaseId, projectId) { + // [START spanner_query_data_with_new_column] + // This sample uses the `MarketingBudget` column. You can add the column + // by running the `add_column` sample or by running this DDL statement against + // your database: + // ALTER TABLE Albums ADD COLUMN MarketingBudget INT64 + + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance and database + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const query = { + sql: 'SELECT SingerId, AlbumId, MarketingBudget FROM Albums', + }; + + // Queries rows from the Albums table + try { + const [rows] = await database.run(query); + + rows.forEach(async row => { + const json = row.toJSON(); + + console.log( + `SingerId: ${json.SingerId}, AlbumId: ${ + json.AlbumId + }, MarketingBudget: ${ + json.MarketingBudget ? json.MarketingBudget : null + }` + ); + }); + } catch (err) { + console.error('ERROR:', err); + } finally { + // Close the database when finished. + database.close(); + } + // [END spanner_query_data_with_new_column] +} + +const { + createDatabaseWithVersionRetentionPeriod, +} = require('./database-create-with-version-retention-period'); +const { + createDatabaseWithEncryptionKey, +} = require('./database-create-with-encryption-key'); + +require('yargs') + .demand(1) + .command( + 'createDatabase ', + 'Creates an example database with two tables in a Cloud Spanner instance.', + {}, + opts => createDatabase(opts.instanceName, opts.databaseName, opts.projectId) + ) + .command( + 'createDatabaseWithEncryptionKey ', + 'Creates an example database using given encryption key in a Cloud Spanner instance.', + {}, + opts => + createDatabaseWithEncryptionKey( + opts.instanceName, + opts.databaseName, + opts.projectId, + opts.keyName + ) + ) + .command( + 'addColumn ', + 'Adds an example MarketingBudget column to an example Cloud Spanner table.', + {}, + opts => addColumn(opts.instanceName, opts.databaseName, opts.projectId) + ) + .command( + 'queryNewColumn ', + 'Executes a read-only SQL query against an example Cloud Spanner table with an additional column (MarketingBudget) added by addColumn.', + {}, + opts => + queryDataWithNewColumn( + opts.instanceName, + opts.databaseName, + opts.projectId + ) + ) + .command( + 'createDatabaseWithVersionRetentionPeriod ', + 'Creates a database with a version retention period.', + {}, + opts => + createDatabaseWithVersionRetentionPeriod( + opts.instanceName, + opts.databaseId, + opts.projectId + ) + ) + .example('node $0 createDatabase "my-instance" "my-database" "my-project-id"') + .example( + 'node $0 createDatabaseWithEncryptionKey "my-instance" "my-database" "my-project-id" "key-name"' + ) + .example('node $0 addColumn "my-instance" "my-database" "my-project-id"') + .example('node $0 queryNewColumn "my-instance" "my-database" "my-project-id"') + .example( + 'node $0 createDatabaseWithVersionRetentionPeriod "my-instance" "my-database-id" "my-project-id"' + ) + .wrap(120) + .recommendCommands() + .epilogue('For more information, see https://cloud.google.com/spanner/docs') + .strict() + .help().argv; diff --git a/samples/archived/sequence-alter.js b/samples/archived/sequence-alter.js new file mode 100644 index 000000000..7b5f363ae --- /dev/null +++ b/samples/archived/sequence-alter.js @@ -0,0 +1,97 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// sample-metadata: +// title: Alters a sequence in a GoogleSQL database. +// usage: node sequence-alter.js + +'use strict'; + +async function main(instanceId, databaseId, projectId) { + // [START spanner_alter_sequence] + // Imports the Google Cloud client library. + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + async function alterSequence(instanceId, databaseId) { + // Gets a reference to a Cloud Spanner instance and database + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const request = [ + 'ALTER SEQUENCE Seq SET OPTIONS (skip_range_min = 1000, skip_range_max = 5000000)', + ]; + + try { + const [operation] = await database.updateSchema(request); + + console.log('Waiting for operation to complete...'); + await operation.promise(); + + console.log( + 'Altered Seq sequence to skip an inclusive range between 1000 and 5000000.' + ); + } catch (err) { + console.error('ERROR:', err); + } + database.runTransaction(async (err, transaction) => { + if (err) { + console.error(err); + return; + } + try { + const [rows, stats] = await transaction.run({ + sql: "INSERT INTO Customers (CustomerName) VALUES ('Lea'), ('Catalina'), ('Smith') THEN RETURN CustomerId", + }); + + rows.forEach(row => { + console.log( + `Inserted customer record with CustomerId: ${ + row.toJSON({wrapNumbers: true}).CustomerId.value + }` + ); + }); + + const rowCount = Math.floor(stats[stats.rowCount]); + console.log(`Number of customer records inserted is: ${rowCount}`); + + await transaction.commit(); + } catch (err) { + console.error('ERROR:', err); + } finally { + // Close the database when finished. + await database.close(); + } + }); + } + await alterSequence(instanceId, databaseId); + // [END spanner_alter_sequence] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/sequence-create.js b/samples/archived/sequence-create.js new file mode 100644 index 000000000..de95647e6 --- /dev/null +++ b/samples/archived/sequence-create.js @@ -0,0 +1,99 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// sample-metadata: +// title: Creates sequence in GoogleSQL database. 
+// usage: node sequence-create.js + +'use strict'; + +async function main(instanceId, databaseId, projectId) { + // [START spanner_create_sequence] + // Imports the Google Cloud client library. + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + async function createSequence(instanceId, databaseId) { + // Gets a reference to a Cloud Spanner instance and database + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const request = [ + "CREATE SEQUENCE Seq OPTIONS (sequence_kind = 'bit_reversed_positive')", + 'CREATE TABLE Customers (CustomerId INT64 DEFAULT (GET_NEXT_SEQUENCE_VALUE(Sequence Seq)), CustomerName STRING(1024)) PRIMARY KEY (CustomerId)', + ]; + + // Creates a new table with sequence + try { + const [operation] = await database.updateSchema(request); + + console.log('Waiting for operation to complete...'); + await operation.promise(); + + console.log( + 'Created Seq sequence and Customers table, where the key column CustomerId uses the sequence as a default value.' + ); + } catch (err) { + console.error('ERROR:', err); + } + database.runTransaction(async (err, transaction) => { + if (err) { + console.error(err); + return; + } + try { + const [rows, stats] = await transaction.run({ + sql: "INSERT INTO Customers (CustomerName) VALUES ('Alice'), ('David'), ('Marc') THEN RETURN CustomerId", + }); + + rows.forEach(row => { + console.log( + `Inserted customer record with CustomerId: ${ + row.toJSON({wrapNumbers: true}).CustomerId.value + }` + ); + }); + + const rowCount = Math.floor(stats[stats.rowCount]); + console.log(`Number of customer records inserted is: ${rowCount}`); + + await transaction.commit(); + } catch (err) { + console.error('ERROR:', err); + } finally { + // Close the database when finished. + await database.close(); + } + }); + } + await createSequence(instanceId, databaseId); + // [END spanner_create_sequence] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/sequence-drop.js b/samples/archived/sequence-drop.js new file mode 100644 index 000000000..915eb8eea --- /dev/null +++ b/samples/archived/sequence-drop.js @@ -0,0 +1,73 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// sample-metadata: +// title: Drops a sequence in GoogleSQL database. +// usage: node sequence-drop.js + +'use strict'; + +async function main(instanceId, databaseId, projectId) { + // [START spanner_drop_sequence] + // Imports the Google Cloud client library. 
+ const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + async function dropSequence(instanceId, databaseId) { + // Gets a reference to a Cloud Spanner instance and database + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const request = [ + 'ALTER TABLE Customers ALTER COLUMN CustomerId DROP DEFAULT', + 'DROP SEQUENCE Seq', + ]; + + // Drop sequence from DDL + try { + const [operation] = await database.updateSchema(request); + + console.log('Waiting for operation to complete...'); + await operation.promise(); + + console.log( + 'Altered Customers table to drop DEFAULT from CustomerId column and dropped the Seq sequence.' + ); + } catch (err) { + console.error('ERROR:', err); + } finally { + // Close the database when finished. + await database.close(); + } + } + await dropSequence(instanceId, databaseId); + // [END spanner_drop_sequence] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/table-alter-with-foreign-key-delete-cascade.js b/samples/archived/table-alter-with-foreign-key-delete-cascade.js new file mode 100644 index 000000000..444e864a3 --- /dev/null +++ b/samples/archived/table-alter-with-foreign-key-delete-cascade.js @@ -0,0 +1,64 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// sample-metadata: +// title: Alters a table with foreign key delete cascade action +// usage: node table-alter-with-foreign-key-delete-cascade.js + +'use strict'; + +function main(instanceId, databaseId, projectId) { + // [START spanner_alter_table_with_foreign_key_delete_cascade] + + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance-id'; + // const databaseId = 'my-database-id'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance and a database. The database does not need to exist. 
+  const instance = spanner.instance(instanceId);
+  const database = instance.database(databaseId);
+
+  async function alterTableWithForeignKeyDeleteCascade() {
+    const [operation] = await database.updateSchema([
+      `ALTER TABLE ShoppingCarts
+      ADD CONSTRAINT FKShoppingCartsCustomerName
+      FOREIGN KEY (CustomerName)
+      REFERENCES Customers(CustomerName)
+      ON DELETE CASCADE`,
+    ]);
+
+    console.log(`Waiting for operation on ${databaseId} to complete...`);
+    await operation.promise();
+
+    console.log('Altered ShoppingCarts table with FKShoppingCartsCustomerName');
+  }
+  alterTableWithForeignKeyDeleteCascade();
+  // [END spanner_alter_table_with_foreign_key_delete_cascade]
+}
+process.on('unhandledRejection', err => {
+  console.error(err.message);
+  process.exitCode = 1;
+});
+main(...process.argv.slice(2));
diff --git a/samples/archived/table-create-with-foreign-key-delete-cascade.js b/samples/archived/table-create-with-foreign-key-delete-cascade.js
new file mode 100644
index 000000000..a41faf7f5
--- /dev/null
+++ b/samples/archived/table-create-with-foreign-key-delete-cascade.js
@@ -0,0 +1,72 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// sample-metadata:
+// title: Creates a table with foreign key delete cascade action
+// usage: node table-create-with-foreign-key-delete-cascade.js.js
+
+'use strict';
+
+function main(instanceId, databaseId, projectId) {
+  // [START spanner_create_table_with_foreign_key_delete_cascade]
+
+  // Imports the Google Cloud client library
+  const {Spanner} = require('@google-cloud/spanner');
+
+  /**
+   * TODO(developer): Uncomment the following lines before running the sample.
+   */
+  // const projectId = 'my-project-id';
+  // const instanceId = 'my-instance-id';
+  // const databaseId = 'my-database-id';
+
+  // Creates a client
+  const spanner = new Spanner({
+    projectId: projectId,
+  });
+
+  // Gets a reference to a Cloud Spanner instance and a database. The database does not need to exist.
+  const instance = spanner.instance(instanceId);
+  const database = instance.database(databaseId);
+
+  async function createTableWithForeignKeyDeleteCascade() {
+    const [operation] = await database.updateSchema([
+      `CREATE TABLE Customers (
+        CustomerId INT64,
+        CustomerName STRING(62) NOT NULL
+        ) PRIMARY KEY (CustomerId)`,
+      `CREATE TABLE ShoppingCarts (
+        CartId INT64 NOT NULL,
+        CustomerId INT64 NOT NULL,
+        CustomerName STRING(62) NOT NULL,
+        CONSTRAINT FKShoppingCartsCustomerId FOREIGN KEY (CustomerId)
+        REFERENCES Customers (CustomerId) ON DELETE CASCADE,
+      ) PRIMARY KEY (CartId)`,
+    ]);
+
+    console.log(`Waiting for operation on ${databaseId} to complete...`);
+    await operation.promise();
+
+    console.log(
+      'Created Customers and ShoppingCarts table with FKShoppingCartsCustomerId'
+    );
+  }
+  createTableWithForeignKeyDeleteCascade();
+  // [END spanner_create_table_with_foreign_key_delete_cascade]
+}
+process.on('unhandledRejection', err => {
+  console.error(err.message);
+  process.exitCode = 1;
+});
+main(...process.argv.slice(2));
diff --git a/samples/archived/table-drop-foreign-key-constraint-delete-cascade.js b/samples/archived/table-drop-foreign-key-constraint-delete-cascade.js
new file mode 100644
index 000000000..def4292c2
--- /dev/null
+++ b/samples/archived/table-drop-foreign-key-constraint-delete-cascade.js
@@ -0,0 +1,63 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// sample-metadata:
+// title: Drops a foreign key constraint with delete cascade action
+// usage: node table-drop-foreign-key-constraint-delete-cascade.js
+
+'use strict';
+
+function main(instanceId, databaseId, projectId) {
+  // [START spanner_drop_foreign_key_constraint_delete_cascade]
+
+  // Imports the Google Cloud client library
+  const {Spanner} = require('@google-cloud/spanner');
+
+  /**
+   * TODO(developer): Uncomment the following lines before running the sample.
+   */
+  // const projectId = 'my-project-id';
+  // const instanceId = 'my-instance-id';
+  // const databaseId = 'my-database-id';
+
+  // Creates a client
+  const spanner = new Spanner({
+    projectId: projectId,
+  });
+
+  // Gets a reference to a Cloud Spanner instance and a database. The database does not need to exist.
+ const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + async function dropForeignKeyConstraintDeleteCascade() { + const [operation] = await database.updateSchema([ + `ALTER TABLE ShoppingCarts + DROP CONSTRAINT FKShoppingCartsCustomerName`, + ]); + + console.log(`Waiting for operation on ${databaseId} to complete...`); + await operation.promise(); + + console.log( + 'Altered ShoppingCarts table to drop FKShoppingCartsCustomerName' + ); + } + dropForeignKeyConstraintDeleteCascade(); + // [END spanner_drop_foreign_key_constraint_delete_cascade] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/archived/timestamp.js b/samples/archived/timestamp.js new file mode 100644 index 000000000..c5f13ad9d --- /dev/null +++ b/samples/archived/timestamp.js @@ -0,0 +1,421 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +async function createTableWithTimestamp(instanceId, databaseId, projectId) { + // [START spanner_create_table_with_timestamp_column] + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + // Note: Cloud Spanner interprets Node.js numbers as FLOAT64s, so they + // must be converted to strings before being inserted as INT64s + const request = [ + `CREATE TABLE Performances ( + SingerId INT64 NOT NULL, + VenueId INT64 NOT NULL, + EventDate DATE, + Revenue INT64, + LastUpdateTime TIMESTAMP NOT NULL OPTIONS (allow_commit_timestamp=true) + ) PRIMARY KEY (SingerId, VenueId, EventDate), + INTERLEAVE IN PARENT Singers ON DELETE CASCADE`, + ]; + + // Creates a table in an existing database + const [operation] = await database.updateSchema(request); + + console.log(`Waiting for operation on ${databaseId} to complete...`); + + await operation.promise(); + + console.log(`Created table Performances in database ${databaseId}.`); + // [END spanner_create_table_with_timestamp_column] +} + +async function insertWithTimestamp(instanceId, databaseId, projectId) { + // [START spanner_insert_data_with_timestamp_column] + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance and database + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + // Instantiate Spanner table objects + const performancesTable = database.table('Performances'); + + const data = [ + { + SingerId: '1', + VenueId: '4', + EventDate: '2017-10-05', + Revenue: '11000', + LastUpdateTime: 'spanner.commit_timestamp()', + }, + { + SingerId: '1', + VenueId: '19', + EventDate: '2017-11-02', + Revenue: '15000', + LastUpdateTime: 'spanner.commit_timestamp()', + }, + { + SingerId: '2', + VenueId: '42', + EventDate: '2017-12-23', + Revenue: '7000', + LastUpdateTime: 'spanner.commit_timestamp()', + }, + ]; + + // Inserts rows into the Singers table + // Note: Cloud Spanner interprets Node.js numbers as FLOAT64s, so + // they must be converted to strings before being inserted as INT64s + try { + await performancesTable.insert(data); + console.log('Inserted data.'); + } catch (err) { + console.error('ERROR:', err); + } finally { + // Close the database when finished + database.close(); + } + // [END spanner_insert_data_with_timestamp_column] +} + +async function queryTableWithTimestamp(instanceId, databaseId, projectId) { + // [START spanner_query_new_table_with_timestamp_column] + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance and database + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const query = { + sql: `SELECT SingerId, VenueId, EventDate, Revenue, LastUpdateTime + FROM Performances + ORDER BY LastUpdateTime DESC`, + }; + + // Queries rows from the Performances table + try { + const [rows] = await database.run(query); + + rows.forEach(row => { + const json = row.toJSON(); + console.log( + `SingerId: ${json.SingerId}, VenueId: ${json.VenueId}, EventDate: ${json.EventDate}, Revenue: ${json.Revenue}, LastUpdateTime: ${json.LastUpdateTime}` + ); + }); + } catch (err) { + console.error('ERROR:', err); + } finally { + // Close the database when finished + database.close(); + } + // [END spanner_query_new_table_with_timestamp_column] +} + +async function addTimestampColumn(instanceId, databaseId, projectId) { + // [START spanner_add_timestamp_column] + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance and database + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const request = [ + `ALTER TABLE Albums ADD COLUMN LastUpdateTime TIMESTAMP OPTIONS + (allow_commit_timestamp=true)`, + ]; + + // Adds a new commit timestamp column to the Albums table + try { + const [operation] = await database.updateSchema(request); + + console.log('Waiting for operation to complete...'); + + await operation.promise(); + + console.log( + 'Added LastUpdateTime as a commit timestamp column in Albums table.' + ); + } catch (err) { + console.error('ERROR:', err); + } finally { + // Close the database when finished. + database.close(); + } + // [END spanner_add_timestamp_column] +} + +async function updateWithTimestamp(instanceId, databaseId, projectId) { + // [START spanner_update_data_with_timestamp_column] + // [START_EXCLUDE] + // This sample uses the `MarketingBudget` column. You can add the column + // by running the `schema.js addColumn` sample or by running this DDL statement against + // your database: + // ALTER TABLE Albums ADD COLUMN MarketingBudget INT64 + // + // In addition this update expects the `LastUpdateTime` column + // added by running the `timestamp.js addTimestampColumn` sample + // or applying the DDL statement: + // ALTER TABLE Albums ADD COLUMN + // LastUpdateTime TIMESTAMP OPTIONS (allow_commit_timestamp=true) + // [END_EXCLUDE] + + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance and database + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + // Update a row in the Albums table + // Note: Cloud Spanner interprets Node.js numbers as FLOAT64s, so they + // must be converted to strings before being inserted as INT64s + const albumsTable = database.table('Albums'); + + const data = [ + { + SingerId: '1', + AlbumId: '1', + MarketingBudget: '1000000', + LastUpdateTime: 'spanner.commit_timestamp()', + }, + { + SingerId: '2', + AlbumId: '2', + MarketingBudget: '750000', + LastUpdateTime: 'spanner.commit_timestamp()', + }, + ]; + + try { + await albumsTable.update(data); + console.log('Updated data.'); + } catch (err) { + console.error('ERROR:', err); + } finally { + // Close the database when finished + database.close(); + } + // [END spanner_update_data_with_timestamp_column] +} + +async function queryWithTimestamp(instanceId, databaseId, projectId) { + // [START spanner_query_data_with_timestamp_column] + // [START_EXCLUDE] + // This sample uses the `MarketingBudget` column. 
You can add the column + // by running the `schema.js addColumn` sample or by running this DDL statement against + // your database: + // ALTER TABLE Albums ADD COLUMN MarketingBudget INT64 + // + // In addition this query expects the `LastUpdateTime` column + // added by running the `timestamp.js addTimestampColumn` sample + // or applying the DDL statement: + // ALTER TABLE Albums ADD COLUMN + // LastUpdateTime TIMESTAMP OPTIONS (allow_commit_timestamp=true) + // [END_EXCLUDE] + + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + // Gets a reference to a Cloud Spanner instance and database + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const query = { + sql: `SELECT SingerId, AlbumId, MarketingBudget, LastUpdateTime + FROM Albums ORDER BY LastUpdateTime DESC`, + }; + + // Queries rows from the Albums table + try { + const [rows] = await database.run(query); + + rows.forEach(row => { + const json = row.toJSON(); + + console.log( + `SingerId: ${json.SingerId}, AlbumId: ${ + json.AlbumId + }, MarketingBudget: ${ + json.MarketingBudget ? json.MarketingBudget : null + }, LastUpdateTime: ${json.LastUpdateTime}` + ); + }); + } catch (err) { + console.error('ERROR:', err); + } finally { + // Close the database when finished + database.close(); + } + // [END spanner_query_data_with_timestamp_column] +} + +require('yargs') + .demand(1) + .command( + 'createTableWithTimestamp ', + 'Creates an example table with a commit timestamp column in a Cloud Spanner database.', + {}, + opts => + createTableWithTimestamp( + opts.instanceName, + opts.databaseName, + opts.projectId + ) + ) + .command( + 'insertWithTimestamp ', + 'Inserts new rows of data including commit timestamps into an example Cloud Spanner table.', + {}, + opts => + insertWithTimestamp(opts.instanceName, opts.databaseName, opts.projectId) + ) + .command( + 'queryTableWithTimestamp ', + 'Executes a read-only SQL query against an example Cloud Spanner table created with a commit timestamp column.', + {}, + opts => + queryTableWithTimestamp( + opts.instanceName, + opts.databaseName, + opts.projectId + ) + ) + .command( + 'addTimestampColumn ', + 'Adds a example commit timestamp column to an existing example Cloud Spanner table.', + {}, + opts => + addTimestampColumn(opts.instanceName, opts.databaseName, opts.projectId) + ) + .command( + 'updateWithTimestamp ', + 'Modifies existing rows of data in an example Cloud Spanner table with a commit timestamp column..', + {}, + opts => + updateWithTimestamp(opts.instanceName, opts.databaseName, opts.projectId) + ) + .command( + 'queryWithTimestamp ', + `Executes a read-only SQL query against an example Cloud Spanner table with an additional + column (LastUpdateTime) added by addTimestampColumn.`, + {}, + opts => + queryWithTimestamp(opts.instanceName, opts.databaseName, opts.projectId) + ) + .example( + 'node $0 createTableWithTimestamp "my-instance" "my-database" "my-project-id"' + ) + .example( + 'node $0 insertWithTimestamp "my-instance" "my-database" "my-project-id"' + ) + .example( + 'node $0 queryTableWithTimestamp "my-instance" "my-database" "my-project-id"' + ) + .example( + 'node $0 
addTimestampColumn "my-instance" "my-database" "my-project-id"' + ) + .example( + 'node $0 updateWithTimestamp "my-instance" "my-database" "my-project-id"' + ) + .example( + 'node $0 queryWithTimestamp "my-instance" "my-database" "my-project-id"' + ) + .wrap(120) + .recommendCommands() + .epilogue('For more information, see https://cloud.google.com/spanner/docs') + .strict() + .help().argv; diff --git a/samples/backups-cancel.js b/samples/backups-cancel.js index b33cf26d8..cae7a0cde 100644 --- a/samples/backups-cancel.js +++ b/samples/backups-cancel.js @@ -1,5 +1,5 @@ /** - * Copyright 2020 Google LLC + * Copyright 2024 Google LLC * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at @@ -17,9 +17,9 @@ async function cancelBackup(instanceId, databaseId, backupId, projectId) { // [START spanner_cancel_backup_create] - // Imports the Google Cloud client library and precise date library - const {Spanner} = require('@google-cloud/spanner'); + // Imports the Google Cloud client library and precise date library + const {Spanner, protos} = require('@google-cloud/spanner'); /** * TODO(developer): Uncomment the following lines before running the sample. */ @@ -33,21 +33,33 @@ async function cancelBackup(instanceId, databaseId, backupId, projectId) { projectId: projectId, }); - // Gets a reference to a Cloud Spanner instance and database - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); - - const backup = instance.backup(backupId); + // Gets a reference to a Cloud Spanner Database Admin Client object + const databaseAdminClient = spanner.getDatabaseAdminClient(); // Creates a new backup of the database try { - console.log(`Creating backup of database ${database.formattedName_}.`); - const databasePath = database.formattedName_; + console.log( + `Creating backup of database ${databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + )}.` + ); + // Expire backup one day in the future const expireTime = Date.now() + 1000 * 60 * 60 * 24; - const [, operation] = await backup.create({ - databasePath: databasePath, - expireTime: expireTime, + const [operation] = await databaseAdminClient.createBackup({ + parent: databaseAdminClient.instancePath(projectId, instanceId), + backupId: backupId, + backup: (protos.google.spanner.admin.database.v1.Backup = { + database: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + expireTime: Spanner.timestamp(expireTime).toStruct(), + name: databaseAdminClient.backupPath(projectId, instanceId, backupId), + }), }); // Cancel the backup @@ -58,10 +70,12 @@ async function cancelBackup(instanceId, databaseId, backupId, projectId) { console.error('ERROR:', err); } finally { // Delete backup in case it got created before the cancel operation - await backup.delete(); - - // Close the database when finished. - await database.close(); + await databaseAdminClient.deleteBackup({ + name: databaseAdminClient.backupPath(projectId, instanceId, backupId), + }); + // Close the spanner client when finished. + // The databaseAdminClient does not require explicit closure. The closure of the Spanner client will automatically close the databaseAdminClient. 
+ spanner.close(); } // [END spanner_cancel_backup_create] } diff --git a/samples/backups-copy.js b/samples/backups-copy.js index 9636be972..9ff142fcd 100644 --- a/samples/backups-copy.js +++ b/samples/backups-copy.js @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -37,15 +37,15 @@ function main( const {Spanner} = require('@google-cloud/spanner'); const {PreciseDate} = require('@google-cloud/precise-date'); - // Instantiates a client + // Creates a client const spanner = new Spanner({ projectId: projectId, }); - async function spannerCopyBackup() { - // Gets a reference to a Cloud Spanner instance and backup - const instance = spanner.instance(instanceId); + // Gets a reference to a Cloud Spanner Database Admin Client object + const databaseAdminClient = spanner.getDatabaseAdminClient(); + async function spannerCopyBackup() { // Expire copy backup 14 days in the future const expireTime = Spanner.timestamp( Date.now() + 1000 * 60 * 60 * 24 * 14 @@ -54,31 +54,34 @@ function main( // Copy the source backup try { console.log(`Creating copy of the source backup ${sourceBackupPath}.`); - const [, operation] = await instance.copyBackup( - sourceBackupPath, - backupId, - { - expireTime: expireTime, - } - ); + const [operation] = await databaseAdminClient.copyBackup({ + parent: databaseAdminClient.instancePath(projectId, instanceId), + sourceBackup: sourceBackupPath, + backupId: backupId, + expireTime: expireTime, + }); console.log( - `Waiting for backup copy ${ - instance.backup(backupId).formattedName_ - } to complete...` + `Waiting for backup copy ${databaseAdminClient.backupPath( + projectId, + instanceId, + backupId + )} to complete...` ); await operation.promise(); // Verify the copy backup is ready - const copyBackup = instance.backup(backupId); - const [copyBackupInfo] = await copyBackup.getMetadata(); - if (copyBackupInfo.state === 'READY') { + const [copyBackup] = await databaseAdminClient.getBackup({ + name: databaseAdminClient.backupPath(projectId, instanceId, backupId), + }); + + if (copyBackup.state === 'READY') { console.log( - `Backup copy ${copyBackupInfo.name} of size ` + - `${copyBackupInfo.sizeBytes} bytes was created at ` + - `${new PreciseDate(copyBackupInfo.createTime).toISOString()} ` + + `Backup copy ${copyBackup.name} of size ` + + `${copyBackup.sizeBytes} bytes was created at ` + + `${new PreciseDate(copyBackup.createTime).toISOString()} ` + 'with version time ' + - `${new PreciseDate(copyBackupInfo.versionTime).toISOString()}` + `${new PreciseDate(copyBackup.versionTime).toISOString()}` ); } else { console.error('ERROR: Copy of backup is not ready.'); diff --git a/samples/backups-create-with-encryption-key.js b/samples/backups-create-with-encryption-key.js index 96433971e..008a2b6ea 100644 --- a/samples/backups-create-with-encryption-key.js +++ b/samples/backups-create-with-encryption-key.js @@ -1,5 +1,5 @@ /** - * Copyright 2021 Google LLC + * Copyright 2024 Google LLC * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
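The backups-copy.js hunks above replace `instance.copyBackup()` with `databaseAdminClient.copyBackup()`, which takes an instance `parent`, the fully qualified source backup name, and the new backup ID. A minimal standalone sketch of that call shape, with placeholder project, instance, and backup IDs (none of these values come from the change set):

```javascript
const {Spanner} = require('@google-cloud/spanner');

// Placeholder identifiers; replace with real resources.
const projectId = 'my-project-id';
const instanceId = 'my-instance';

async function copyBackupSketch() {
  const spanner = new Spanner({projectId});
  const databaseAdminClient = spanner.getDatabaseAdminClient();

  const [operation] = await databaseAdminClient.copyBackup({
    parent: databaseAdminClient.instancePath(projectId, instanceId),
    sourceBackup: databaseAdminClient.backupPath(projectId, instanceId, 'my-source-backup'),
    backupId: 'my-backup-copy',
    // Expire the copy 14 days from now.
    expireTime: Spanner.timestamp(Date.now() + 1000 * 60 * 60 * 24 * 14),
  });
  await operation.promise();

  // Verify the copy is ready before relying on it.
  const [copy] = await databaseAdminClient.getBackup({
    name: databaseAdminClient.backupPath(projectId, instanceId, 'my-backup-copy'),
  });
  console.log(`Backup copy ${copy.name} is in state ${copy.state}.`);
  spanner.close();
}

copyBackupSketch().catch(console.error);
```

As the comments in the hunks above note, closing the Spanner client also closes the admin client, so no separate close call is needed.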
* You may obtain a copy of the License at @@ -23,8 +23,9 @@ async function createBackupWithEncryptionKey( keyName ) { // [START spanner_create_backup_with_encryption_key] - // Imports the Google Cloud client library and precise date library - const {Spanner} = require('@google-cloud/spanner'); + + // Imports the Google Cloud client library + const {Spanner, protos} = require('@google-cloud/spanner'); const {PreciseDate} = require('@google-cloud/precise-date'); /** @@ -34,7 +35,6 @@ async function createBackupWithEncryptionKey( // const instanceId = 'my-instance'; // const databaseId = 'my-database'; // const backupId = 'my-backup'; - // const versionTime = Date.now() - 1000 * 60 * 60 * 24; // One day ago // const keyName = // 'projects/my-project-id/my-region/keyRings/my-key-ring/cryptoKeys/my-key'; @@ -43,33 +43,54 @@ async function createBackupWithEncryptionKey( projectId: projectId, }); - // Gets a reference to a Cloud Spanner instance and database - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); - - const backup = instance.backup(backupId); + // Gets a reference to a Cloud Spanner Database Admin Client object + const databaseAdminClient = spanner.getDatabaseAdminClient(); // Creates a new backup of the database try { - console.log(`Creating backup of database ${database.formattedName_}.`); - const databasePath = database.formattedName_; + console.log( + `Creating backup of database ${databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + )}.` + ); + // Expire backup 14 days in the future const expireTime = Date.now() + 1000 * 60 * 60 * 24 * 14; + // Create a backup of the state of the database at the current time. - const [, operation] = await backup.create({ - databasePath: databasePath, - expireTime: expireTime, + const [operation] = await databaseAdminClient.createBackup({ + parent: databaseAdminClient.instancePath(projectId, instanceId), + backupId: backupId, + backup: (protos.google.spanner.admin.database.v1.Backup = { + database: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + expireTime: Spanner.timestamp(expireTime).toStruct(), + name: databaseAdminClient.backupPath(projectId, instanceId, backupId), + }), encryptionConfig: { encryptionType: 'CUSTOMER_MANAGED_ENCRYPTION', kmsKeyName: keyName, }, }); - console.log(`Waiting for backup ${backup.formattedName_} to complete...`); + console.log( + `Waiting for backup ${databaseAdminClient.backupPath( + projectId, + instanceId, + backupId + )} to complete...` + ); await operation.promise(); // Verify backup is ready - const [backupInfo] = await backup.getMetadata(); + const [backupInfo] = await databaseAdminClient.getBackup({ + name: databaseAdminClient.backupPath(projectId, instanceId, backupId), + }); if (backupInfo.state === 'READY') { console.log( `Backup ${backupInfo.name} of size ` + @@ -83,8 +104,9 @@ async function createBackupWithEncryptionKey( } catch (err) { console.error('ERROR:', err); } finally { - // Close the database when finished. - await database.close(); + // Close the spanner client when finished. + // The databaseAdminClient does not require explicit closure. The closure of the Spanner client will automatically close the databaseAdminClient. 
+ spanner.close(); } // [END spanner_create_backup_with_encryption_key] } diff --git a/samples/backups-create.js b/samples/backups-create.js index 6a8af90e4..6af1578cc 100644 --- a/samples/backups-create.js +++ b/samples/backups-create.js @@ -1,5 +1,5 @@ /** - * Copyright 2021 Google LLC + * Copyright 2024 Google LLC * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at @@ -23,8 +23,9 @@ async function createBackup( versionTime ) { // [START spanner_create_backup] + // Imports the Google Cloud client library and precise date library - const {Spanner} = require('@google-cloud/spanner'); + const {Spanner, protos} = require('@google-cloud/spanner'); const {PreciseDate} = require('@google-cloud/precise-date'); /** @@ -41,30 +42,51 @@ async function createBackup( projectId: projectId, }); - // Gets a reference to a Cloud Spanner instance and database - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); - - const backup = instance.backup(backupId); + // Gets a reference to a Cloud Spanner Database Admin Client object + const databaseAdminClient = spanner.getDatabaseAdminClient(); // Creates a new backup of the database try { - console.log(`Creating backup of database ${database.formattedName_}.`); - const databasePath = database.formattedName_; + console.log( + `Creating backup of database ${databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + )}.` + ); + // Expire backup 14 days in the future const expireTime = Date.now() + 1000 * 60 * 60 * 24 * 14; + // Create a backup of the state of the database at the current time. - const [, operation] = await backup.create({ - databasePath: databasePath, - expireTime: expireTime, - versionTime: versionTime, + const [operation] = await databaseAdminClient.createBackup({ + parent: databaseAdminClient.instancePath(projectId, instanceId), + backupId: backupId, + backup: (protos.google.spanner.admin.database.v1.Backup = { + database: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + expireTime: Spanner.timestamp(expireTime).toStruct(), + versionTime: Spanner.timestamp(versionTime).toStruct(), + name: databaseAdminClient.backupPath(projectId, instanceId, backupId), + }), }); - console.log(`Waiting for backup ${backup.formattedName_} to complete...`); + console.log( + `Waiting for backup ${databaseAdminClient.backupPath( + projectId, + instanceId, + backupId + )} to complete...` + ); await operation.promise(); // Verify backup is ready - const [backupInfo] = await backup.getMetadata(); + const [backupInfo] = await databaseAdminClient.getBackup({ + name: databaseAdminClient.backupPath(projectId, instanceId, backupId), + }); if (backupInfo.state === 'READY') { console.log( `Backup ${backupInfo.name} of size ` + @@ -79,8 +101,9 @@ async function createBackup( } catch (err) { console.error('ERROR:', err); } finally { - // Close the database when finished. - await database.close(); + // Close the spanner client when finished. + // The databaseAdminClient does not require explicit closure. The closure of the Spanner client will automatically close the databaseAdminClient. 
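The two createBackup migrations above share the same request shape: a `parent` instance path plus a `backup` message carrying the database path, expire time, and (optionally) version time. A minimal sketch under placeholder IDs; the function name and timing values are illustrative only:

```javascript
const {Spanner} = require('@google-cloud/spanner');

// Placeholder identifiers; replace with real resources.
const projectId = 'my-project-id';
const instanceId = 'my-instance';
const databaseId = 'my-database';
const backupId = 'my-backup';

async function createBackupSketch() {
  const spanner = new Spanner({projectId});
  const databaseAdminClient = spanner.getDatabaseAdminClient();

  const expireTime = Date.now() + 1000 * 60 * 60 * 24 * 14; // 14 days ahead
  const versionTime = Date.now() - 1000 * 60 * 60 * 24; // one day ago

  const [operation] = await databaseAdminClient.createBackup({
    parent: databaseAdminClient.instancePath(projectId, instanceId),
    backupId: backupId,
    backup: {
      database: databaseAdminClient.databasePath(projectId, instanceId, databaseId),
      expireTime: Spanner.timestamp(expireTime).toStruct(),
      versionTime: Spanner.timestamp(versionTime).toStruct(),
      name: databaseAdminClient.backupPath(projectId, instanceId, backupId),
    },
    // For a CMEK-protected backup, add an encryptionConfig here, as in
    // backups-create-with-encryption-key.js above.
  });
  await operation.promise();

  const [backupInfo] = await databaseAdminClient.getBackup({
    name: databaseAdminClient.backupPath(projectId, instanceId, backupId),
  });
  console.log(`Backup ${backupInfo.name} is ${backupInfo.state}.`);
  spanner.close();
}

createBackupSketch().catch(console.error);
```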
+ spanner.close(); } // [END spanner_create_backup] } diff --git a/samples/backups-delete.js b/samples/backups-delete.js index d1e8a2bfb..27189c3c9 100644 --- a/samples/backups-delete.js +++ b/samples/backups-delete.js @@ -1,5 +1,5 @@ /** - * Copyright 2020 Google LLC + * Copyright 2024 Google LLC * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at @@ -15,8 +15,9 @@ 'use strict'; -async function deleteBackup(instanceId, databaseId, backupId, projectId) { +async function deleteBackup(instanceId, backupId, projectId) { // [START spanner_delete_backup] + // Imports the Google Cloud client library const {Spanner} = require('@google-cloud/spanner'); @@ -33,19 +34,23 @@ async function deleteBackup(instanceId, databaseId, backupId, projectId) { projectId: projectId, }); - // Gets a reference to a Cloud Spanner instance and backup - const instance = spanner.instance(instanceId); - const backup = instance.backup(backupId); + // Gets a reference to a Cloud Spanner Database Admin Client object + const databaseAdminClient = spanner.getDatabaseAdminClient(); // Delete the backup console.log(`Deleting backup ${backupId}.`); - await backup.delete(); + await databaseAdminClient.deleteBackup({ + name: databaseAdminClient.backupPath(projectId, instanceId, backupId), + }); + console.log('Backup deleted.'); // Verify backup no longer exists - const exists = await backup.exists(); - if (exists) { + try { + await databaseAdminClient.getBackup({ + name: databaseAdminClient.backupPath(projectId, instanceId, backupId), + }); console.error('Error: backup still exists.'); - } else { + } catch (err) { console.log('Backup deleted.'); } // [END spanner_delete_backup] diff --git a/samples/backups-get-database-operations.js b/samples/backups-get-database-operations.js index d2c00ca54..b4be6ffc0 100644 --- a/samples/backups-get-database-operations.js +++ b/samples/backups-get-database-operations.js @@ -1,5 +1,5 @@ /** - * Copyright 2020 Google LLC + * Copyright 2024 Google LLC * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
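backups-delete.js above also changes how deletion is verified: instead of `backup.exists()`, a follow-up `getBackup` call is expected to fail once the backup is gone. A small sketch of that pattern, with placeholder IDs:

```javascript
const {Spanner} = require('@google-cloud/spanner');

// Placeholder identifiers; replace with real resources.
const projectId = 'my-project-id';
const instanceId = 'my-instance';
const backupId = 'my-backup';

async function deleteBackupSketch() {
  const spanner = new Spanner({projectId});
  const databaseAdminClient = spanner.getDatabaseAdminClient();
  const backupName = databaseAdminClient.backupPath(projectId, instanceId, backupId);

  await databaseAdminClient.deleteBackup({name: backupName});

  // getBackup now fails (NOT_FOUND), which confirms the deletion.
  try {
    await databaseAdminClient.getBackup({name: backupName});
    console.error('Error: backup still exists.');
  } catch (err) {
    console.log('Backup deleted.');
  }
  spanner.close();
}

deleteBackupSketch().catch(console.error);
```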
* You may obtain a copy of the License at @@ -17,6 +17,7 @@ async function getDatabaseOperations(instanceId, projectId) { // [START spanner_list_database_operations] + // Imports the Google Cloud client library const {Spanner, protos} = require('@google-cloud/spanner'); @@ -31,15 +32,17 @@ async function getDatabaseOperations(instanceId, projectId) { projectId: projectId, }); - // Gets a reference to a Cloud Spanner instance - const instance = spanner.instance(instanceId); + // Gets a reference to a Cloud Spanner Database Admin Client object + const databaseAdminClient = spanner.getDatabaseAdminClient(); // List database operations try { - const [databaseOperations] = await instance.getDatabaseOperations({ - filter: - '(metadata.@type:type.googleapis.com/google.spanner.admin.database.v1.OptimizeRestoredDatabaseMetadata)', - }); + const [databaseOperations] = + await databaseAdminClient.listDatabaseOperations({ + parent: databaseAdminClient.instancePath(projectId, instanceId), + filter: + '(metadata.@type:type.googleapis.com/google.spanner.admin.database.v1.OptimizeRestoredDatabaseMetadata)', + }); console.log('Optimize Database Operations:'); databaseOperations.forEach(databaseOperation => { const metadata = diff --git a/samples/backups-get-operations.js b/samples/backups-get-operations.js index 9369b3e74..626e6755e 100644 --- a/samples/backups-get-operations.js +++ b/samples/backups-get-operations.js @@ -1,5 +1,5 @@ /** - * Copyright 2020 Google LLC + * Copyright 2024 Google LLC * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at @@ -22,6 +22,7 @@ async function getBackupOperations( projectId ) { // [START spanner_list_backup_operations] + // Imports the Google Cloud client library const {Spanner, protos} = require('@google-cloud/spanner'); @@ -38,12 +39,13 @@ async function getBackupOperations( projectId: projectId, }); - // Gets a reference to a Cloud Spanner instance - const instance = spanner.instance(instanceId); + // Gets a reference to a Cloud Spanner Database Admin Client object + const databaseAdminClient = spanner.getDatabaseAdminClient(); // List create backup operations try { - const [backupOperations] = await instance.getBackupOperations({ + const [backupOperations] = await databaseAdminClient.listBackupOperations({ + parent: databaseAdminClient.instancePath(projectId, instanceId), filter: '(metadata.@type:type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata) ' + `AND (metadata.database:${databaseId})`, @@ -69,7 +71,8 @@ async function getBackupOperations( '(metadata.@type:type.googleapis.com/google.spanner.admin.database.v1.CopyBackupMetadata) ' + `AND (metadata.source_backup:${backupId})` ); - const [backupOperations] = await instance.getBackupOperations({ + const [backupOperations] = await databaseAdminClient.listBackupOperations({ + parent: databaseAdminClient.instancePath(projectId, instanceId), filter: '(metadata.@type:type.googleapis.com/google.spanner.admin.database.v1.CopyBackupMetadata) ' + `AND (metadata.source_backup:${backupId})`, @@ -87,6 +90,10 @@ async function getBackupOperations( }); } catch (err) { console.error('ERROR:', err); + } finally { + // Close the spanner client when finished. + // The databaseAdminClient does not require explicit closure. The closure of the Spanner client will automatically close the databaseAdminClient. 
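The operations listings above move from `instance.getBackupOperations()` / `instance.getDatabaseOperations()` to `listBackupOperations` / `listDatabaseOperations`, both of which take an instance `parent` and the same metadata filters. A compact sketch of the backup-operations variant with placeholder IDs (the database-operations call takes the same shape):

```javascript
const {Spanner} = require('@google-cloud/spanner');

// Placeholder identifiers; replace with real resources.
const projectId = 'my-project-id';
const instanceId = 'my-instance';
const databaseId = 'my-database';

async function listBackupOperationsSketch() {
  const spanner = new Spanner({projectId});
  const databaseAdminClient = spanner.getDatabaseAdminClient();

  // Create-backup operations for one database.
  const [backupOperations] = await databaseAdminClient.listBackupOperations({
    parent: databaseAdminClient.instancePath(projectId, instanceId),
    filter:
      '(metadata.@type:type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata) ' +
      `AND (metadata.database:${databaseId})`,
  });

  backupOperations.forEach(op => {
    // Each entry is a long-running operation; decode op.metadata for progress details.
    console.log(`${op.name} (done: ${op.done})`);
  });
  spanner.close();
}

listBackupOperationsSketch().catch(console.error);
```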
+ spanner.close(); } // [END spanner_list_backup_operations] } diff --git a/samples/backups-get.js b/samples/backups-get.js index 05b536dab..f5ab5416f 100644 --- a/samples/backups-get.js +++ b/samples/backups-get.js @@ -1,5 +1,5 @@ /** - * Copyright 2020 Google LLC + * Copyright 2024 Google LLC * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at @@ -17,6 +17,7 @@ async function getBackups(instanceId, databaseId, backupId, projectId) { // [START spanner_list_backups] + // Imports the Google Cloud client library const {Spanner} = require('@google-cloud/spanner'); @@ -33,79 +34,128 @@ async function getBackups(instanceId, databaseId, backupId, projectId) { projectId: projectId, }); - // Gets a reference to a Cloud Spanner instance - const instance = spanner.instance(instanceId); + // Gets a reference to a Cloud Spanner Database Admin Client object + const databaseAdminClient = spanner.getDatabaseAdminClient(); try { + // Get the parent(instance) of the database + const parent = databaseAdminClient.instancePath(projectId, instanceId); + // List all backups - const [allBackups] = await instance.getBackups(); + const [allBackups] = await databaseAdminClient.listBackups({ + parent: parent, + }); + console.log('All backups:'); - allBackups.forEach(backup => { - console.log(backup.id); + allBackups.forEach(backups => { + if (backups.name) { + const backup = backups.name; + const delimiter = + 'projects/' + projectId + '/instances/' + instanceId + '/backups/'; + const result = backup.substring(delimiter.length); + console.log(result); + } }); // List backups filtered by backup name - const [backupsByName] = await instance.getBackups({ + const [backupsByName] = await databaseAdminClient.listBackups({ + parent: parent, filter: `Name:${backupId}`, }); console.log('Backups matching backup name:'); backupsByName.forEach(backup => { - console.log(backup.id); + if (backup.name) { + const backupName = backup.name; + const delimiter = + 'projects/' + projectId + '/instances/' + instanceId + '/backups/'; + const result = backupName.substring(delimiter.length); + console.log(result); + } }); // List backups expiring within 30 days const expireTime = new Date(); expireTime.setDate(expireTime.getDate() + 30); - const [backupsByExpiry] = await instance.getBackups({ + const [backupsByExpiry] = await databaseAdminClient.listBackups({ + parent: parent, filter: `expire_time < "${expireTime.toISOString()}"`, }); console.log('Backups expiring within 30 days:'); backupsByExpiry.forEach(backup => { - console.log(backup.id); + if (backup.name) { + const backupName = backup.name; + const delimiter = + 'projects/' + projectId + '/instances/' + instanceId + '/backups/'; + const result = backupName.substring(delimiter.length); + console.log(result); + } }); // List backups filtered by database name - const [backupsByDbName] = await instance.getBackups({ + const [backupsByDbName] = await databaseAdminClient.listBackups({ + parent: parent, filter: `Database:${databaseId}`, }); console.log('Backups matching database name:'); backupsByDbName.forEach(backup => { - console.log(backup.id); + if (backup.name) { + const backupName = backup.name; + const delimiter = + 'projects/' + projectId + '/instances/' + instanceId + '/backups/'; + const result = backupName.substring(delimiter.length); + console.log(result); + } }); // List backups filtered by backup size - const [backupsBySize] = await instance.getBackups({ + 
const [backupsBySize] = await databaseAdminClient.listBackups({ + parent: parent, filter: 'size_bytes > 100', }); console.log('Backups filtered by size:'); backupsBySize.forEach(backup => { - console.log(backup.id); + if (backup.name) { + const backupName = backup.name; + const delimiter = + 'projects/' + projectId + '/instances/' + instanceId + '/backups/'; + const result = backupName.substring(delimiter.length); + console.log(result); + } }); // List backups that are ready that were created after a certain time const createTime = new Date(); createTime.setDate(createTime.getDate() - 1); - const [backupsByCreateTime] = await instance.getBackups({ + const [backupsByCreateTime] = await databaseAdminClient.listBackups({ + parent: parent, filter: `(state:READY) AND (create_time >= "${createTime.toISOString()}")`, }); console.log('Ready backups filtered by create time:'); backupsByCreateTime.forEach(backup => { - console.log(backup.id); + if (backup.name) { + const backupName = backup.name; + const delimiter = + 'projects/' + projectId + '/instances/' + instanceId + '/backups/'; + const result = backupName.substring(delimiter.length); + console.log(result); + } }); // List backups using pagination - let getBackupsOptions = { - pageSize: 3, - gaxOptions: {autoPaginate: false}, - }; console.log('Get backups paginated:'); - do { - const [backups, nextQuery] = await instance.getBackups(getBackupsOptions); - backups.forEach(backup => { - console.log(backup.id); - }); - getBackupsOptions = nextQuery; - } while (getBackupsOptions); + const [backups] = await databaseAdminClient.listBackups({ + parent: parent, + pageSize: 3, + }); + backups.forEach(backup => { + if (backup.name) { + const backupName = backup.name; + const delimiter = + 'projects/' + projectId + '/instances/' + instanceId + '/backups/'; + const result = backupName.substring(delimiter.length); + console.log(result); + } + }); } catch (err) { console.error('ERROR:', err); } diff --git a/samples/backups-restore-with-encryption-key.js b/samples/backups-restore-with-encryption-key.js index c9ebb4003..7a2323246 100644 --- a/samples/backups-restore-with-encryption-key.js +++ b/samples/backups-restore-with-encryption-key.js @@ -1,5 +1,5 @@ /** - * Copyright 2021 Google LLC + * Copyright 2024 Google LLC * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
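With `listBackups`, the admin client returns fully qualified backup names rather than `Backup` objects with an `id`, and paging is driven by `pageSize` on the request. A short sketch combining a filter with a page size, using placeholder IDs:

```javascript
const {Spanner} = require('@google-cloud/spanner');

// Placeholder identifiers; replace with real resources.
const projectId = 'my-project-id';
const instanceId = 'my-instance';

async function listBackupsSketch() {
  const spanner = new Spanner({projectId});
  const databaseAdminClient = spanner.getDatabaseAdminClient();

  // Backups larger than 100 bytes, at most three per page.
  const [backups] = await databaseAdminClient.listBackups({
    parent: databaseAdminClient.instancePath(projectId, instanceId),
    filter: 'size_bytes > 100',
    pageSize: 3,
  });

  const prefix = `projects/${projectId}/instances/${instanceId}/backups/`;
  backups.forEach(backup => {
    if (backup.name) {
      // Strip the resource prefix to recover the short backup ID.
      console.log(backup.name.substring(prefix.length));
    }
  });
  spanner.close();
}

listBackupsSketch().catch(console.error);
```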
* You may obtain a copy of the License at @@ -23,6 +23,7 @@ async function restoreBackupWithEncryptionKey( keyName ) { // [START spanner_restore_backup_with_encryption_key] + // Imports the Google Cloud client library and precise date library const {Spanner} = require('@google-cloud/spanner'); @@ -41,35 +42,39 @@ async function restoreBackupWithEncryptionKey( projectId: projectId, }); - // Gets a reference to a Cloud Spanner instance and database - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + // Gets a reference to a Cloud Spanner Database Admin Client object + const databaseAdminClient = spanner.getDatabaseAdminClient(); // Restore the database console.log( - `Restoring database ${database.formattedName_} from backup ${backupId}.` - ); - const [, restoreOperation] = await database.restore( - `projects/${projectId}/instances/${instanceId}/backups/${backupId}`, - { - encryptionConfig: { - encryptionType: 'CUSTOMER_MANAGED_ENCRYPTION', - kmsKeyName: keyName, - }, - } + `Restoring database ${databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + )} from backup ${backupId}.` ); + const [restoreOperation] = await databaseAdminClient.restoreDatabase({ + parent: databaseAdminClient.instancePath(projectId, instanceId), + databaseId: databaseId, + backup: databaseAdminClient.backupPath(projectId, instanceId, backupId), + encryptionConfig: { + encryptionType: 'CUSTOMER_MANAGED_ENCRYPTION', + kmsKeyName: keyName, + }, + }); // Wait for restore to complete console.log('Waiting for database restore to complete...'); await restoreOperation.promise(); console.log('Database restored from backup.'); - const restoreInfo = await database.getRestoreInfo(); - const [data] = await database.get(); + const [metadata] = await databaseAdminClient.getDatabase({ + name: databaseAdminClient.databasePath(projectId, instanceId, databaseId), + }); console.log( - `Database ${restoreInfo.backupInfo.sourceDatabase} was restored ` + - `to ${databaseId} from backup ${restoreInfo.backupInfo.backup} ` + - `using encryption key ${data.metadata.encryptionConfig.kmsKeyName}.` + `Database ${metadata.restoreInfo.backupInfo.sourceDatabase} was restored ` + + `to ${databaseId} from backup ${metadata.restoreInfo.backupInfo.backup} ` + + `using encryption key ${metadata.encryptionConfig.kmsKeyName}.` ); // [END spanner_restore_backup_with_encryption_key] } diff --git a/samples/backups-restore.js b/samples/backups-restore.js index 467c7049e..182513479 100644 --- a/samples/backups-restore.js +++ b/samples/backups-restore.js @@ -1,5 +1,5 @@ /** - * Copyright 2021 Google LLC + * Copyright 2024 Google LLC * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at @@ -34,29 +34,38 @@ async function restoreBackup(instanceId, databaseId, backupId, projectId) { projectId: projectId, }); - // Gets a reference to a Cloud Spanner instance and database - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + // Gets a reference to a Cloud Spanner Database Admin Client object + const databaseAdminClient = spanner.getDatabaseAdminClient(); // Restore the database console.log( - `Restoring database ${database.formattedName_} from backup ${backupId}.` - ); - const [, restoreOperation] = await database.restore( - `projects/${projectId}/instances/${instanceId}/backups/${backupId}` + `Restoring database ${databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + )} from backup ${backupId}.` ); + const [restoreOperation] = await databaseAdminClient.restoreDatabase({ + parent: databaseAdminClient.instancePath(projectId, instanceId), + databaseId: databaseId, + backup: databaseAdminClient.backupPath(projectId, instanceId, backupId), + }); // Wait for restore to complete console.log('Waiting for database restore to complete...'); await restoreOperation.promise(); console.log('Database restored from backup.'); - const restoreInfo = await database.getRestoreInfo(); + const [metadata] = await databaseAdminClient.getDatabase({ + name: databaseAdminClient.databasePath(projectId, instanceId, databaseId), + }); console.log( - `Database ${restoreInfo.backupInfo.sourceDatabase} was restored ` + - `to ${databaseId} from backup ${restoreInfo.backupInfo.backup} ` + + `Database ${metadata.restoreInfo.backupInfo.sourceDatabase} was restored ` + + `to ${databaseId} from backup ${metadata.restoreInfo.backupInfo.backup} ` + 'with version time ' + - `${new PreciseDate(restoreInfo.backupInfo.versionTime).toISOString()}.` + `${new PreciseDate( + metadata.restoreInfo.backupInfo.versionTime + ).toISOString()}.` ); // [END spanner_restore_backup] } diff --git a/samples/backups-update.js b/samples/backups-update.js index 639513821..ad26a53c5 100644 --- a/samples/backups-update.js +++ b/samples/backups-update.js @@ -1,5 +1,5 @@ /** - * Copyright 2020 Google LLC + * Copyright 2024 Google LLC * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
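Both restore samples above use `restoreDatabase` and then read the restore details from `getDatabase` metadata rather than `database.getRestoreInfo()`. A minimal sketch of that flow with placeholder IDs:

```javascript
const {Spanner} = require('@google-cloud/spanner');

// Placeholder identifiers; replace with real resources.
const projectId = 'my-project-id';
const instanceId = 'my-instance';
const databaseId = 'my-restored-database';
const backupId = 'my-backup';

async function restoreBackupSketch() {
  const spanner = new Spanner({projectId});
  const databaseAdminClient = spanner.getDatabaseAdminClient();

  const [restoreOperation] = await databaseAdminClient.restoreDatabase({
    parent: databaseAdminClient.instancePath(projectId, instanceId),
    databaseId: databaseId,
    backup: databaseAdminClient.backupPath(projectId, instanceId, backupId),
    // For a CMEK-protected restore, add an encryptionConfig here, as in
    // backups-restore-with-encryption-key.js above.
  });
  await restoreOperation.promise();

  // Restore details now live on the database metadata.
  const [metadata] = await databaseAdminClient.getDatabase({
    name: databaseAdminClient.databasePath(projectId, instanceId, databaseId),
  });
  console.log(
    `Restored ${metadata.restoreInfo.backupInfo.sourceDatabase} from ` +
      `${metadata.restoreInfo.backupInfo.backup}.`
  );
  spanner.close();
}

restoreBackupSketch().catch(console.error);
```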
* You may obtain a copy of the License at @@ -17,8 +17,9 @@ async function updateBackup(instanceId, backupId, projectId) { // [START spanner_update_backup] + // Imports the Google Cloud client library and precise date library - const {Spanner} = require('@google-cloud/spanner'); + const {Spanner, protos} = require('@google-cloud/spanner'); const {PreciseDate} = require('@google-cloud/precise-date'); /** @@ -33,28 +34,51 @@ async function updateBackup(instanceId, backupId, projectId) { projectId: projectId, }); - // Gets a reference to a Cloud Spanner instance and backup - const instance = spanner.instance(instanceId); - const backup = instance.backup(backupId); + // Gets a reference to a Cloud Spanner Database Admin Client object + const databaseAdminClient = spanner.getDatabaseAdminClient(); // Read backup metadata and update expiry time try { - const currentExpireTime = await backup.getExpireTime(); - const maxExpireTime = backup.metadata.maxExpireTime; + const [metadata] = await databaseAdminClient.getBackup({ + name: databaseAdminClient.backupPath(projectId, instanceId, backupId), + }); + + const currentExpireTime = metadata.expireTime; + const maxExpireTime = metadata.maxExpireTime; const wantExpireTime = new PreciseDate(currentExpireTime); wantExpireTime.setDate(wantExpireTime.getDate() + 1); + // New expire time should be less than the max expire time const min = (currentExpireTime, maxExpireTime) => currentExpireTime < maxExpireTime ? currentExpireTime : maxExpireTime; const newExpireTime = new PreciseDate(min(wantExpireTime, maxExpireTime)); console.log( - `Backup ${backupId} current expire time: ${currentExpireTime.toISOString()}` + `Backup ${backupId} current expire time: ${Spanner.timestamp( + currentExpireTime + ).toISOString()}` ); - console.log(`Updating expire time to ${newExpireTime.toISOString()}`); - await backup.updateExpireTime(newExpireTime); + console.log( + `Updating expire time to ${Spanner.timestamp( + newExpireTime + ).toISOString()}` + ); + + await databaseAdminClient.updateBackup({ + backup: { + name: databaseAdminClient.backupPath(projectId, instanceId, backupId), + expireTime: Spanner.timestamp(newExpireTime).toStruct(), + }, + updateMask: (protos.google.protobuf.FieldMask = { + paths: ['expire_time'], + }), + }); console.log('Expire time updated.'); } catch (err) { console.error('ERROR:', err); + } finally { + // Close the spanner client when finished. + // The databaseAdminClient does not require explicit closure. The closure of the Spanner client will automatically close the databaseAdminClient. + spanner.close(); } // [END spanner_update_backup] } diff --git a/samples/backups.js b/samples/backups.js index 3cedeef18..efec54cde 100644 --- a/samples/backups.js +++ b/samples/backups.js @@ -1,5 +1,5 @@ /** - * Copyright 2020 Google LLC + * Copyright 2024 Google LLC * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
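backups-update.js above shows the new `updateBackup` call, which takes the changed `Backup` fields plus a field mask naming what to write. A compact sketch with placeholder IDs; the max-expire-time clamp from the sample is omitted for brevity, but the new expiry must not exceed `metadata.maxExpireTime`:

```javascript
const {Spanner} = require('@google-cloud/spanner');
const {PreciseDate} = require('@google-cloud/precise-date');

// Placeholder identifiers; replace with real resources.
const projectId = 'my-project-id';
const instanceId = 'my-instance';
const backupId = 'my-backup';

async function updateBackupSketch() {
  const spanner = new Spanner({projectId});
  const databaseAdminClient = spanner.getDatabaseAdminClient();
  const backupName = databaseAdminClient.backupPath(projectId, instanceId, backupId);

  // Read the current expiry and push it out by one day.
  const [metadata] = await databaseAdminClient.getBackup({name: backupName});
  const newExpireTime = new PreciseDate(metadata.expireTime);
  newExpireTime.setDate(newExpireTime.getDate() + 1);

  await databaseAdminClient.updateBackup({
    backup: {
      name: backupName,
      expireTime: Spanner.timestamp(newExpireTime).toStruct(),
    },
    // Only the fields named in the mask are written.
    updateMask: {paths: ['expire_time']},
  });
  console.log('Expire time updated.');
  spanner.close();
}

updateBackupSketch().catch(console.error);
```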
* You may obtain a copy of the License at @@ -45,6 +45,9 @@ require('yargs') Date.parse(opts.versionTime) ) ) + .example( + 'node $0 createBackup "my-instance" "my-database" "my-backup" "my-project-id" "my-version-time"' + ) .command( 'createBackupWithEncryptionKey ', 'Creates a backup of a Cloud Spanner database using an encryption key.', @@ -58,6 +61,9 @@ require('yargs') opts.keyName ) ) + .example( + 'node $0 createBackupWithEncryptionKey "my-instance" "my-database" "my-backup" "my-project-id" "my-key-name"' + ) .command( 'cancelBackup ', 'Creates and cancels a backup of a Cloud Spanner database.', @@ -70,6 +76,9 @@ require('yargs') opts.projectId ) ) + .example( + 'node $0 cancelBackup "my-instance" "my-database" "my-backup" "my-project-id"' + ) .command( 'getBackups ', 'Lists backups in the instance with filters.', @@ -82,6 +91,9 @@ require('yargs') opts.projectId ) ) + .example( + 'node $0 getBackups "my-instance" "my-database" "my-backup" "my-project-id"' + ) .command( 'getBackupOperations ', 'Lists all backup operations in the instance.', @@ -94,18 +106,23 @@ require('yargs') opts.projectId ) ) + .example( + 'node $0 getBackupOperations "my-instance" "my-database" "my-backup" "my-project-id"' + ) .command( 'getDatabaseOperations ', 'Lists all database operations in the instance.', {}, opts => getDatabaseOperations(opts.instanceName, opts.projectId) ) + .example('node $0 getDatabaseOperations "my-instance" "my-project-id"') .command( 'updateBackup ', 'Updates the expire time of a backup.', {}, opts => updateBackup(opts.instanceName, opts.backupName, opts.projectId) ) + .example('node $0 updateBackup "my-instance" "my-backup" "my-project-id"') .command( 'restoreBackup ', 'Restores a Cloud Spanner database from a backup.', @@ -118,6 +135,9 @@ require('yargs') opts.projectId ) ) + .example( + 'node $0 restoreBackup "my-instance" "my-database" "my-backup" "my-project-id"' + ) .command( 'restoreBackupWithEncryptionKey ', 'Restores a Cloud Spanner database from a backup with an encryption key.', @@ -131,21 +151,16 @@ require('yargs') opts.keyName ) ) + .example( + 'node $0 restoreBackupWithEncryptionKey "my-instance" "my-database" "my-backup" "my-project-id" "my-key-name"' + ) .command( - 'deleteBackup ', + 'deleteBackup ', 'Deletes a backup.', {}, - opts => - deleteBackup( - opts.instanceName, - opts.databaseName, - opts.backupName, - opts.projectId - ) - ) - .example( - 'node $0 createBackup "my-instance" "my-database" "my-backup" "my-project-id"' + opts => deleteBackup(opts.instanceName, opts.backupName, opts.projectId) ) + .example('node $0 deleteBackup "my-instance" "my-backup" "my-project-id"') .wrap(120) .recommendCommands() .epilogue('For more information, see https://cloud.google.com/spanner/docs') diff --git a/samples/database-create-with-default-leader.js b/samples/database-create-with-default-leader.js index 7f804451c..8f70e16b6 100644 --- a/samples/database-create-with-default-leader.js +++ b/samples/database-create-with-default-leader.js @@ -1,5 +1,5 @@ /** - * Copyright 2021 Google LLC + * Copyright 2024 Google LLC * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at @@ -27,23 +27,29 @@ function main(instanceId, databaseId, defaultLeader, projectId) { // const projectId = 'my-project-id'; // const instanceId = 'my-instance-id'; // const databaseId = 'my-database-id'; - // const defaultLeader = 'my-default-leader'; + // const defaultLeader = 'my-default-leader'; example: 'asia-northeast1' // Imports the Google Cloud client library const {Spanner} = require('@google-cloud/spanner'); - // Creates a client + // creates a client const spanner = new Spanner({ projectId: projectId, }); - // Gets a reference to a Cloud Spanner instance and a database. The database does not need to exist. - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + + // Gets a reference to a Cloud Spanner Database Admin Client object + const databaseAdminClient = spanner.getDatabaseAdminClient(); async function createDatabaseWithDefaultLeader() { // Create a new database with an extra statement which will alter the // database after creation to set the default leader. - console.log(`Creating database ${database.formattedName_}.`); + console.log( + `Creating database ${databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + )}.` + ); const createSingersTableStatement = ` CREATE TABLE Singers ( SingerId INT64 NOT NULL, @@ -64,15 +70,18 @@ function main(instanceId, databaseId, defaultLeader, projectId) { const setDefaultLeaderStatement = ` ALTER DATABASE \`${databaseId}\` SET OPTIONS (default_leader = '${defaultLeader}')`; - const [, operation] = await database.create({ + + const [operation] = await databaseAdminClient.createDatabase({ + createStatement: 'CREATE DATABASE `' + databaseId + '`', extraStatements: [ createSingersTableStatement, createAlbumsStatement, setDefaultLeaderStatement, ], + parent: databaseAdminClient.instancePath(projectId, instanceId), }); - console.log(`Waiting for creation of ${database.id} to complete...`); + console.log(`Waiting for creation of ${databaseId} to complete...`); await operation.promise(); console.log( `Created database ${databaseId} with default leader ${defaultLeader}.` diff --git a/samples/database-create-with-encryption-key.js b/samples/database-create-with-encryption-key.js index 01d677978..047db39ba 100644 --- a/samples/database-create-with-encryption-key.js +++ b/samples/database-create-with-encryption-key.js @@ -1,4 +1,4 @@ -// Copyright 2021 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -21,8 +21,9 @@ async function createDatabaseWithEncryptionKey( keyName ) { // [START spanner_create_database_with_encryption_key] + // Imports the Google Cloud client library - const {Spanner} = require('@google-cloud/spanner'); + const {Spanner, protos} = require('@google-cloud/spanner'); /** * TODO(developer): Uncomment the following lines before running the sample. 
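database-create-with-default-leader.js above builds the database through `createDatabase` with a `createStatement` plus `extraStatements` that run against the new database immediately after creation. A minimal sketch of the same request under placeholder IDs and an assumed leader region:

```javascript
const {Spanner} = require('@google-cloud/spanner');

// Placeholder identifiers; replace with real resources.
const projectId = 'my-project-id';
const instanceId = 'my-instance';
const databaseId = 'my-database';
const defaultLeader = 'asia-northeast1'; // must be a valid leader for the instance config

async function createDatabaseSketch() {
  const spanner = new Spanner({projectId});
  const databaseAdminClient = spanner.getDatabaseAdminClient();

  const [operation] = await databaseAdminClient.createDatabase({
    createStatement: 'CREATE DATABASE `' + databaseId + '`',
    // Extra DDL is applied to the new database right after creation.
    extraStatements: [
      `ALTER DATABASE \`${databaseId}\` SET OPTIONS (default_leader = '${defaultLeader}')`,
    ],
    parent: databaseAdminClient.instancePath(projectId, instanceId),
  });
  await operation.promise();
  console.log(`Created ${databaseId} with default leader ${defaultLeader}.`);
  spanner.close();
}

createDatabaseSketch().catch(console.error);
```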
@@ -33,36 +34,36 @@ async function createDatabaseWithEncryptionKey( // const keyName = // 'projects/my-project-id/my-region/keyRings/my-key-ring/cryptoKeys/my-key'; - // Creates a client + // creates a client const spanner = new Spanner({ projectId: projectId, }); - // Gets a reference to a Cloud Spanner instance - const instance = spanner.instance(instanceId); - - const request = { - encryptionConfig: { - kmsKeyName: keyName, - }, - }; + // Gets a reference to a Cloud Spanner Database Admin Client object + const databaseAdminClient = spanner.getDatabaseAdminClient(); // Creates a database - const [database, operation] = await instance.createDatabase( - databaseId, - request - ); + const [operation] = await databaseAdminClient.createDatabase({ + createStatement: 'CREATE DATABASE `' + databaseId + '`', + parent: databaseAdminClient.instancePath(projectId, instanceId), + encryptionConfig: + (protos.google.spanner.admin.database.v1.EncryptionConfig = { + kmsKeyName: keyName, + }), + }); - console.log(`Waiting for operation on ${database.id} to complete...`); + console.log(`Waiting for operation on ${databaseId} to complete...`); await operation.promise(); console.log(`Created database ${databaseId} on instance ${instanceId}.`); // Get encryption key - const [data] = await database.get(); + const [metadata] = await databaseAdminClient.getDatabase({ + name: databaseAdminClient.databasePath(projectId, instanceId, databaseId), + }); console.log( - `Database encrypted with key ${data.metadata.encryptionConfig.kmsKeyName}.` + `Database encrypted with key ${metadata.encryptionConfig.kmsKeyName}.` ); // [END spanner_create_database_with_encryption_key] } diff --git a/samples/database-create-with-version-retention-period.js b/samples/database-create-with-version-retention-period.js index 33cfd5d5e..bbae86fd0 100644 --- a/samples/database-create-with-version-retention-period.js +++ b/samples/database-create-with-version-retention-period.js @@ -1,5 +1,5 @@ /** - * Copyright 2021 Google LLC + * Copyright 2024 Google LLC * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at @@ -21,6 +21,7 @@ async function createDatabaseWithVersionRetentionPeriod( projectId ) { // [START spanner_create_database_with_version_retention_period] + // Imports the Google Cloud client library const {Spanner} = require('@google-cloud/spanner'); @@ -30,38 +31,48 @@ async function createDatabaseWithVersionRetentionPeriod( // const projectId = 'my-project-id'; // const instanceId = 'my-instance'; - // Creates a client + // creates a client const spanner = new Spanner({ projectId: projectId, }); - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + // Gets a reference to a Cloud Spanner Database Admin Client object + const databaseAdminClient = spanner.getDatabaseAdminClient(); try { // Create a new database with an extra statement which will alter the // database after creation to set the version retention period. 
- console.log(`Creating database ${instance.formattedName_}.`); + console.log( + `Creating database ${databaseAdminClient.instancePath( + projectId, + instanceId + )}.` + ); const versionRetentionStatement = ` ALTER DATABASE \`${databaseId}\` SET OPTIONS (version_retention_period = '1d')`; - const [, operation] = await database.create({ + + const [operation] = await databaseAdminClient.createDatabase({ + createStatement: 'CREATE DATABASE `' + databaseId + '`', extraStatements: [versionRetentionStatement], + parent: databaseAdminClient.instancePath(projectId, instanceId), }); - console.log(`Waiting for operation on ${database.id} to complete...`); + console.log(`Waiting for operation on ${databaseId} to complete...`); await operation.promise(); console.log(` Created database ${databaseId} with version retention period.`); - const [data] = await database.get(); - console.log( - `Version retention period: ${data.metadata.versionRetentionPeriod}` - ); - const earliestVersionTime = Spanner.timestamp( - data.metadata.earliestVersionTime - ); - console.log(`Earliest version time: ${earliestVersionTime}`); + const [metadata] = await databaseAdminClient.getDatabase({ + name: databaseAdminClient.databasePath(projectId, instanceId, databaseId), + }); + + console.log(`Version retention period: ${metadata.versionRetentionPeriod}`); + const milliseconds = + parseInt(metadata.earliestVersionTime.seconds, 10) * 1000 + + parseInt(metadata.earliestVersionTime.nanos, 10) / 1e6; + const date = new Date(milliseconds); + console.log(`Earliest version time: ${date.toString()}`); } catch (err) { console.error('ERROR:', err); } diff --git a/samples/database-get-ddl.js b/samples/database-get-ddl.js index 65efb3a25..2b2bd9608 100644 --- a/samples/database-get-ddl.js +++ b/samples/database-get-ddl.js @@ -1,5 +1,5 @@ /** - * Copyright 2021 Google LLC + * Copyright 2024 Google LLC * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at @@ -31,21 +31,32 @@ function main(instanceId, databaseId, projectId) { // Imports the Google Cloud client library const {Spanner} = require('@google-cloud/spanner'); - // Creates a client + // creates a client const spanner = new Spanner({ projectId: projectId, }); - // Gets a reference to a Cloud Spanner instance and a database. - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + + const databaseAdminClient = spanner.getDatabaseAdminClient(); async function getDatabaseDdl() { // Get the schema definition of the database. 
- const [ddlStatements] = await database.getSchema(); + const [ddlStatements] = await databaseAdminClient.getDatabaseDdl({ + database: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + }); - console.log(`Retrieved database DDL for ${database.formattedName_}:`); - ddlStatements.forEach(statement => { - console.log(`${statement};\n`); + console.log( + `Retrieved database DDL for ${databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + )}:` + ); + ddlStatements.statements.forEach(element => { + console.log(element); }); } getDatabaseDdl(); diff --git a/samples/database-get-default-leader.js b/samples/database-get-default-leader.js index fe11365a9..7b934d6fe 100644 --- a/samples/database-get-default-leader.js +++ b/samples/database-get-default-leader.js @@ -1,5 +1,5 @@ /** - * Copyright 2021 Google LLC + * Copyright 2024 Google LLC * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at @@ -20,7 +20,6 @@ 'use strict'; function main(instanceId, databaseId, projectId) { - // [START spanner_query_information_schema_database_options] /** * TODO(developer): Uncomment the following lines before running the sample. */ @@ -31,27 +30,21 @@ function main(instanceId, databaseId, projectId) { // Imports the Google Cloud client library const {Spanner} = require('@google-cloud/spanner'); - // Creates a client + // creates a client const spanner = new Spanner({ projectId: projectId, }); - // Gets a reference to a Cloud Spanner instance and a database. - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); - async function getDatabaseDdl() { + const databaseAdminClient = spanner.getDatabaseAdminClient(); + + async function getDefaultLeader() { // Get the default leader option for the database. - const [rows] = await database.run({ - sql: ` - SELECT s.OPTION_NAME, s.OPTION_VALUE - FROM INFORMATION_SCHEMA.DATABASE_OPTIONS s - WHERE s.OPTION_NAME = 'default_leader'`, - json: true, + const [metadata] = await databaseAdminClient.getDatabase({ + name: databaseAdminClient.databasePath(projectId, instanceId, databaseId), }); - if (rows.length > 0) { - const option = rows[0]; + if (metadata.defaultLeader !== '') { console.log( - `The ${option.OPTION_NAME} for ${databaseId} is ${option.OPTION_VALUE}` + `The default_leader for ${databaseId} is ${metadata.defaultLeader}` ); } else { console.log( @@ -59,8 +52,7 @@ function main(instanceId, databaseId, projectId) { ); } } - getDatabaseDdl(); - // [END spanner_query_information_schema_database_options] + getDefaultLeader(); } process.on('unhandledRejection', err => { console.error(err.message); diff --git a/samples/database-update-default-leader.js b/samples/database-update-default-leader.js index 62fd80d18..7a5c928a0 100644 --- a/samples/database-update-default-leader.js +++ b/samples/database-update-default-leader.js @@ -1,5 +1,5 @@ /** - * Copyright 2021 Google LLC + * Copyright 2024 Google LLC * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
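The two hunks above replace `database.getSchema()` and the INFORMATION_SCHEMA query with `getDatabaseDdl` and a `getDatabase` metadata read. A short combined sketch, using placeholder IDs:

```javascript
const {Spanner} = require('@google-cloud/spanner');

// Placeholder identifiers; replace with real resources.
const projectId = 'my-project-id';
const instanceId = 'my-instance';
const databaseId = 'my-database';

async function inspectDatabaseSketch() {
  const spanner = new Spanner({projectId});
  const databaseAdminClient = spanner.getDatabaseAdminClient();
  const databaseName = databaseAdminClient.databasePath(projectId, instanceId, databaseId);

  // DDL comes back as a list of statements on the response object.
  const [ddl] = await databaseAdminClient.getDatabaseDdl({database: databaseName});
  ddl.statements.forEach(statement => console.log(statement));

  // Options such as the default leader are fields on the database metadata.
  const [metadata] = await databaseAdminClient.getDatabase({name: databaseName});
  console.log(`default_leader: ${metadata.defaultLeader || '(not set)'}`);

  spanner.close();
}

inspectDatabaseSketch().catch(console.error);
```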
* You may obtain a copy of the License at @@ -32,22 +32,34 @@ function main(instanceId, databaseId, defaultLeader, projectId) { // Imports the Google Cloud client library const {Spanner} = require('@google-cloud/spanner'); - // Creates a client + // creates a client const spanner = new Spanner({ projectId: projectId, }); - // Gets a reference to a Cloud Spanner instance and a database. - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + + const databaseAdminClient = spanner.getDatabaseAdminClient(); async function updateDatabaseWithDefaultLeader() { - console.log(`Updating database ${database.formattedName_}.`); + console.log( + `Updating database ${databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + )}.` + ); const setDefaultLeaderStatement = ` ALTER DATABASE \`${databaseId}\` SET OPTIONS (default_leader = '${defaultLeader}')`; - const [operation] = await database.updateSchema(setDefaultLeaderStatement); + const [operation] = await databaseAdminClient.updateDatabaseDdl({ + database: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + statements: [setDefaultLeaderStatement], + }); - console.log(`Waiting for updating of ${database.id} to complete...`); + console.log(`Waiting for updating of ${databaseId} to complete...`); await operation.promise(); console.log( `Updated database ${databaseId} with default leader ${defaultLeader}.` diff --git a/samples/database-update.js b/samples/database-update.js index 583eda219..19855302b 100644 --- a/samples/database-update.js +++ b/samples/database-update.js @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
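database-update-default-leader.js above routes the `ALTER DATABASE` statement through `updateDatabaseDdl`, the same call used for all schema changes in the migrated samples. A minimal sketch of that call with placeholder IDs and an assumed leader region:

```javascript
const {Spanner} = require('@google-cloud/spanner');

// Placeholder identifiers; replace with real resources.
const projectId = 'my-project-id';
const instanceId = 'my-instance';
const databaseId = 'my-database';
const defaultLeader = 'asia-northeast1'; // must be a valid leader for the instance config

async function updateDdlSketch() {
  const spanner = new Spanner({projectId});
  const databaseAdminClient = spanner.getDatabaseAdminClient();

  const [operation] = await databaseAdminClient.updateDatabaseDdl({
    database: databaseAdminClient.databasePath(projectId, instanceId, databaseId),
    statements: [
      `ALTER DATABASE \`${databaseId}\` SET OPTIONS (default_leader = '${defaultLeader}')`,
    ],
  });
  await operation.promise();
  console.log(`Updated default leader of ${databaseId} to ${defaultLeader}.`);
  spanner.close();
}

updateDdlSketch().catch(console.error);
```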
@@ -31,34 +31,51 @@ function main( // const databaseId = 'my-database'; // const projectId = 'my-project-id'; - // Imports the Google Cloud Spanner client library - const {Spanner} = require('@google-cloud/spanner'); + // Imports the Google Cloud client library + const {Spanner, protos} = require('@google-cloud/spanner'); - // Instantiates a client + // creates a client const spanner = new Spanner({ projectId: projectId, }); - async function updateDatabase() { - // Gets a reference to a Cloud Spanner instance and database - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + const databaseAdminClient = spanner.getDatabaseAdminClient(); + async function updateDatabase() { + // Update the database metadata fields try { - console.log(`Updating database ${database.id}.`); - const [operation] = await database.setMetadata({ - enableDropProtection: true, + console.log( + `Updating database ${databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + )}.` + ); + const [operation] = await databaseAdminClient.updateDatabase({ + database: { + name: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + enableDropProtection: true, + }, + // updateMask contains the fields to be updated in database + updateMask: (protos.google.protobuf.FieldMask = { + paths: ['enable_drop_protection'], + }), }); console.log( - `Waiting for update operation for ${database.id} to complete...` + `Waiting for update operation for ${databaseId} to complete...` ); await operation.promise(); - console.log(`Updated database ${database.id}.`); + console.log(`Updated database ${databaseId}.`); } catch (err) { console.log('ERROR:', err); } finally { - // Close the database when finished. - database.close(); + // Close the spanner client when finished. + // The databaseAdminClient does not require explicit closure. The closure of the Spanner client will automatically close the databaseAdminClient. + spanner.close(); } } updateDatabase(); diff --git a/samples/datatypes.js b/samples/datatypes.js index 4120718d5..4d877cd04 100644 --- a/samples/datatypes.js +++ b/samples/datatypes.js @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -16,8 +16,6 @@ async function createVenuesTable(instanceId, databaseId, projectId) { // [START spanner_create_table_with_datatypes] - // Imports the Google Cloud client library - const {Spanner} = require('@google-cloud/spanner'); /** * TODO(developer): Uncomment the following lines before running the sample. @@ -26,14 +24,15 @@ async function createVenuesTable(instanceId, databaseId, projectId) { // const instanceId = 'my-instance'; // const databaseId = 'my-database'; - // Creates a client + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + // creates a client const spanner = new Spanner({ projectId: projectId, }); - // Gets a reference to a Cloud Spanner instance. - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + const databaseAdminClient = spanner.getDatabaseAdminClient(); const request = [ `CREATE TABLE Venues ( @@ -50,7 +49,14 @@ async function createVenuesTable(instanceId, databaseId, projectId) { ]; // Creates a table in an existing database. 
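database-update.js above switches from `database.setMetadata()` to `updateDatabase` with an explicit field mask. A small sketch enabling drop protection under placeholder IDs:

```javascript
const {Spanner} = require('@google-cloud/spanner');

// Placeholder identifiers; replace with real resources.
const projectId = 'my-project-id';
const instanceId = 'my-instance';
const databaseId = 'my-database';

async function enableDropProtectionSketch() {
  const spanner = new Spanner({projectId});
  const databaseAdminClient = spanner.getDatabaseAdminClient();

  const [operation] = await databaseAdminClient.updateDatabase({
    database: {
      name: databaseAdminClient.databasePath(projectId, instanceId, databaseId),
      enableDropProtection: true,
    },
    // Only the fields named in the mask are written.
    updateMask: {paths: ['enable_drop_protection']},
  });
  await operation.promise();
  console.log(`Enabled drop protection on ${databaseId}.`);
  spanner.close();
}

enableDropProtectionSketch().catch(console.error);
```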
- const [operation] = await database.updateSchema(request); + const [operation] = await databaseAdminClient.updateDatabaseDdl({ + database: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + statements: request, + }); console.log(`Waiting for operation on ${databaseId} to complete...`); @@ -60,560 +66,8 @@ async function createVenuesTable(instanceId, databaseId, projectId) { // [END spanner_create_table_with_datatypes] } -async function insertData(instanceId, databaseId, projectId) { - // [START spanner_insert_datatypes_data] - // Imports the Google Cloud client library. - const {Spanner} = require('@google-cloud/spanner'); - - /** - * TODO(developer): Uncomment the following lines before running the sample. - */ - // const projectId = 'my-project-id'; - // const instanceId = 'my-instance'; - // const databaseId = 'my-database'; - - // Creates a client. - const spanner = new Spanner({ - projectId: projectId, - }); - - // Gets a reference to a Cloud Spanner instance and database. - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); - - // Instantiate Spanner table objects. - const venuesTable = database.table('Venues'); - const exampleBytes1 = new Buffer.from('Hello World 1'); - const exampleBytes2 = new Buffer.from('Hello World 2'); - const exampleBytes3 = new Buffer.from('Hello World 3'); - const availableDates1 = ['2020-12-01', '2020-12-02', '2020-12-03']; - const availableDates2 = ['2020-11-01', '2020-11-05', '2020-11-15']; - const availableDates3 = ['2020-10-01', '2020-10-07']; - - // Note: Cloud Spanner interprets Node.js numbers as FLOAT64s, so they - // must be converted to strings before being inserted as INT64s. - const data = [ - { - VenueId: '4', - VenueName: 'Venue 4', - VenueInfo: exampleBytes1, - Capacity: '1800', - AvailableDates: availableDates1, - LastContactDate: '2018-09-02', - OutdoorVenue: false, - PopularityScore: Spanner.float(0.85543), - LastUpdateTime: 'spanner.commit_timestamp()', - }, - { - VenueId: '19', - VenueName: 'Venue 19', - VenueInfo: exampleBytes2, - Capacity: '6300', - AvailableDates: availableDates2, - LastContactDate: '2019-01-15', - OutdoorVenue: true, - PopularityScore: Spanner.float(0.98716), - LastUpdateTime: 'spanner.commit_timestamp()', - }, - { - VenueId: '42', - VenueName: 'Venue 42', - VenueInfo: exampleBytes3, - Capacity: '3000', - AvailableDates: availableDates3, - LastContactDate: '2018-10-01', - OutdoorVenue: false, - PopularityScore: Spanner.float(0.72598), - LastUpdateTime: 'spanner.commit_timestamp()', - }, - ]; - - // Inserts rows into the Venues table. - try { - await venuesTable.insert(data); - console.log('Inserted data.'); - } catch (err) { - console.error('ERROR:', err); - } finally { - // Close the database when finished. - database.close(); - } - // [END spanner_insert_datatypes_data] -} - -async function queryWithArray(instanceId, databaseId, projectId) { - // [START spanner_query_with_array_parameter] - // Imports the Google Cloud client library. - const {Spanner} = require('@google-cloud/spanner'); - - /** - * TODO(developer): Uncomment the following lines before running the sample. - */ - // const projectId = 'my-project-id'; - // const instanceId = 'my-instance'; - // const databaseId = 'my-database'; - - // Creates a client. - const spanner = new Spanner({ - projectId: projectId, - }); - - // Gets a reference to a Cloud Spanner instance and database. 
- const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); - - const fieldType = { - type: 'date', - }; - - const parentFieldType = { - type: 'array', - child: fieldType, - }; - - const exampleArray = ['2020-10-01', '2020-11-01']; - - const query = { - sql: `SELECT VenueId, VenueName, AvailableDate FROM Venues v, - UNNEST(v.AvailableDates) as AvailableDate - WHERE AvailableDate in UNNEST(@availableDates)`, - params: { - availableDates: exampleArray, - }, - types: { - availableDates: parentFieldType, - }, - }; - - // Queries rows from the Venues table. - try { - const [rows] = await database.run(query); - rows.forEach(row => { - const availableDate = row[2]['value']; - const json = row.toJSON(); - console.log( - `VenueId: ${json.VenueId}, VenueName: ${ - json.VenueName - }, AvailableDate: ${JSON.stringify(availableDate).substring(1, 11)}` - ); - }); - } catch (err) { - console.error('ERROR:', err); - } finally { - // Close the database when finished. - database.close(); - } - // [END spanner_query_with_array_parameter] -} - -async function queryWithBool(instanceId, databaseId, projectId) { - // [START spanner_query_with_bool_parameter] - // Imports the Google Cloud client library. - const {Spanner} = require('@google-cloud/spanner'); - - /** - * TODO(developer): Uncomment the following lines before running the sample. - */ - // const projectId = 'my-project-id'; - // const instanceId = 'my-instance'; - // const databaseId = 'my-database'; - - // Creates a client - const spanner = new Spanner({ - projectId: projectId, - }); - - // Gets a reference to a Cloud Spanner instance and database. - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); - - const fieldType = { - type: 'bool', - }; - - const exampleBool = true; - - const query = { - sql: `SELECT VenueId, VenueName, OutdoorVenue FROM Venues - WHERE OutdoorVenue = @outdoorVenue`, - params: { - outdoorVenue: exampleBool, - }, - types: { - outdoorVenue: fieldType, - }, - }; - - // Queries rows from the Venues table. - try { - const [rows] = await database.run(query); - - rows.forEach(row => { - const json = row.toJSON(); - console.log( - `VenueId: ${json.VenueId}, VenueName: ${json.VenueName},` + - ` OutdoorVenue: ${json.OutdoorVenue}` - ); - }); - } catch (err) { - console.error('ERROR:', err); - } finally { - // Close the database when finished. - database.close(); - } - // [END spanner_query_with_bool_parameter] -} - -async function queryWithBytes(instanceId, databaseId, projectId) { - // [START spanner_query_with_bytes_parameter] - // Imports the Google Cloud client library. - const {Spanner} = require('@google-cloud/spanner'); - - /** - * TODO(developer): Uncomment the following lines before running the sample. - */ - // const projectId = 'my-project-id'; - // const instanceId = 'my-instance'; - // const databaseId = 'my-database'; - - // Creates a client - const spanner = new Spanner({ - projectId: projectId, - }); - - // Gets a reference to a Cloud Spanner instance and database. - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); - - const fieldType = { - type: 'bytes', - }; - - const exampleBytes = new Buffer.from('Hello World 1'); - - const query = { - sql: `SELECT VenueId, VenueName FROM Venues - WHERE VenueInfo = @venueInfo`, - params: { - venueInfo: exampleBytes, - }, - types: { - venueInfo: fieldType, - }, - }; - - // Queries rows from the Venues table. 
- try { - const [rows] = await database.run(query); - - rows.forEach(row => { - const json = row.toJSON(); - console.log(`VenueId: ${json.VenueId}, VenueName: ${json.VenueName}`); - }); - } catch (err) { - console.error('ERROR:', err); - } finally { - // Close the database when finished. - database.close(); - } - // [END spanner_query_with_bytes_parameter] -} - -async function queryWithDate(instanceId, databaseId, projectId) { - // [START spanner_query_with_date_parameter] - // Imports the Google Cloud client library. - const {Spanner} = require('@google-cloud/spanner'); - - /** - * TODO(developer): Uncomment the following lines before running the sample. - */ - // const projectId = 'my-project-id'; - // const instanceId = 'my-instance'; - // const databaseId = 'my-database'; - - // Creates a client. - const spanner = new Spanner({ - projectId: projectId, - }); - - // Gets a reference to a Cloud Spanner instance and database. - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); - - const fieldType = { - type: 'date', - }; - - const exampleDate = '2019-01-01'; - - const query = { - sql: `SELECT VenueId, VenueName, LastContactDate FROM Venues - WHERE LastContactDate < @lastContactDate`, - params: { - lastContactDate: exampleDate, - }, - types: { - lastContactDate: fieldType, - }, - }; - - // Queries rows from the Venues table. - try { - const [rows] = await database.run(query); - - rows.forEach(row => { - const date = row[2]['value']; - const json = row.toJSON(); - console.log( - `VenueId: ${json.VenueId}, VenueName: ${json.VenueName},` + - ` LastContactDate: ${JSON.stringify(date).substring(1, 11)}` - ); - }); - } catch (err) { - console.error('ERROR:', err); - } finally { - // Close the database when finished. - database.close(); - } - // [END spanner_query_with_date_parameter] -} - -async function queryWithFloat(instanceId, databaseId, projectId) { - // [START spanner_query_with_float_parameter] - // Imports the Google Cloud client library. - const {Spanner} = require('@google-cloud/spanner'); - - /** - * TODO(developer): Uncomment the following lines before running the sample. - */ - // const projectId = 'my-project-id'; - // const instanceId = 'my-instance'; - // const databaseId = 'my-database'; - - // Creates a client - const spanner = new Spanner({ - projectId: projectId, - }); - - // Gets a reference to a Cloud Spanner instance and database. - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); - - const fieldType = { - type: 'float64', - }; - - const exampleFloat = Spanner.float(0.8); - - const query = { - sql: `SELECT VenueId, VenueName, PopularityScore FROM Venues - WHERE PopularityScore > @popularityScore`, - params: { - popularityScore: exampleFloat, - }, - types: { - popularityScore: fieldType, - }, - }; - - // Queries rows from the Venues table. - try { - const [rows] = await database.run(query); - - rows.forEach(row => { - const json = row.toJSON(); - console.log( - `VenueId: ${json.VenueId}, VenueName: ${json.VenueName},` + - ` PopularityScore: ${json.PopularityScore}` - ); - }); - } catch (err) { - console.error('ERROR:', err); - } finally { - // Close the database when finished. - database.close(); - } - // [END spanner_query_with_float_parameter] -} - -async function queryWithInt(instanceId, databaseId, projectId) { - // [START spanner_query_with_int_parameter] - // Imports the Google Cloud client library. 
- const {Spanner} = require('@google-cloud/spanner'); - - /** - * TODO(developer): Uncomment the following lines before running the sample. - */ - // const projectId = 'my-project-id'; - // const instanceId = 'my-instance'; - // const databaseId = 'my-database'; - - // Creates a client - const spanner = new Spanner({ - projectId: projectId, - }); - - // Gets a reference to a Cloud Spanner instance and database. - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); - - const fieldType = { - type: 'int64', - }; - - const exampleInt = 3000; - - const query = { - sql: `SELECT VenueId, VenueName, Capacity FROM Venues - WHERE Capacity >= @capacity`, - params: { - capacity: exampleInt, - }, - types: { - capacity: fieldType, - }, - }; - - // Queries rows from the Venues table. - try { - const [rows] = await database.run(query); - - rows.forEach(row => { - const json = row.toJSON(); - console.log( - `VenueId: ${json.VenueId}, VenueName: ${json.VenueName},` + - ` Capacity: ${json.Capacity}` - ); - }); - } catch (err) { - console.error('ERROR:', err); - } finally { - // Close the database when finished. - database.close(); - } - // [END spanner_query_with_int_parameter] -} - -async function queryWithString(instanceId, databaseId, projectId) { - // [START spanner_query_with_string_parameter] - // Imports the Google Cloud client library. - const {Spanner} = require('@google-cloud/spanner'); - - /** - * TODO(developer): Uncomment the following lines before running the sample. - */ - // const projectId = 'my-project-id'; - // const instanceId = 'my-instance'; - // const databaseId = 'my-database'; - - // Creates a client - const spanner = new Spanner({ - projectId: projectId, - }); - - // Gets a reference to a Cloud Spanner instance and database. - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); - - const fieldType = { - type: 'string', - }; - - const exampleString = 'Venue 42'; - - const query = { - sql: `SELECT VenueId, VenueName FROM Venues - WHERE VenueName = @venueName`, - params: { - venueName: exampleString, - }, - types: { - venueName: fieldType, - }, - }; - - // Queries rows from the Venues table. - try { - const [rows] = await database.run(query); - - rows.forEach(row => { - const json = row.toJSON(); - console.log(`VenueId: ${json.VenueId}, VenueName: ${json.VenueName}`); - }); - } catch (err) { - console.error('ERROR:', err); - } finally { - // Close the database when finished. - database.close(); - } - // [END spanner_query_with_string_parameter] -} - -async function queryWithTimestamp(instanceId, databaseId, projectId) { - // [START spanner_query_with_timestamp_parameter] - // Imports the Google Cloud client library. - const {Spanner} = require('@google-cloud/spanner'); - - /** - * TODO(developer): Uncomment the following lines before running the sample. - */ - // const projectId = 'my-project-id'; - // const instanceId = 'my-instance'; - // const databaseId = 'my-database'; - - // Creates a client. - const spanner = new Spanner({ - projectId: projectId, - }); - - // Gets a reference to a Cloud Spanner instance and database. 
- const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); - - const fieldType = { - type: 'timestamp', - }; - - const exampleTimestamp = new Date().toISOString(); - - const query = { - sql: `SELECT VenueId, VenueName, LastUpdateTime FROM Venues - WHERE LastUpdateTime < @lastUpdateTime`, - params: { - lastUpdateTime: exampleTimestamp, - }, - types: { - lastUpdateTime: fieldType, - }, - }; - - // Queries rows from the Venues table. - try { - const [rows] = await database.run(query); - - rows.forEach(row => { - const json = row.toJSON(); - console.log( - `VenueId: ${json.VenueId}, VenueName: ${json.VenueName},` + - ` LastUpdateTime: ${json.LastUpdateTime}` - ); - }); - } catch (err) { - console.error('ERROR:', err); - } finally { - // Close the database when finished. - database.close(); - } - // [END spanner_query_with_timestamp_parameter] -} - const {addNumericColumn} = require('./numeric-add-column'); -const {updateWithNumericData} = require('./numeric-update-data'); -const {queryWithNumericParameter} = require('./numeric-query-parameter'); - const {addJsonColumn} = require('./json-add-column'); -const {updateWithJsonData} = require('./json-update-data'); -const {queryWithJsonParameter} = require('./json-query-parameter'); require('yargs') .demand(1) @@ -624,62 +78,6 @@ require('yargs') opts => createVenuesTable(opts.instanceName, opts.databaseName, opts.projectId) ) - .command( - 'insertData ', - 'Inserts new rows of data into an sample "Venues" Cloud Spanner table.', - {}, - opts => insertData(opts.instanceName, opts.databaseName, opts.projectId) - ) - .command( - 'queryWithArray ', - "Query data from the sample 'Venues' table with an ARRAY datatype.", - {}, - opts => queryWithArray(opts.instanceName, opts.databaseName, opts.projectId) - ) - .command( - 'queryWithBool ', - "Query data from the sample 'Venues' table with a BOOL datatype.", - {}, - opts => queryWithBool(opts.instanceName, opts.databaseName, opts.projectId) - ) - .command( - 'queryWithBytes ', - "Query data from the sample 'Venues' table with a BYTES datatype.", - {}, - opts => queryWithBytes(opts.instanceName, opts.databaseName, opts.projectId) - ) - .command( - 'queryWithDate ', - "Query data from the sample 'Venues' table with a DATE datatype.", - {}, - opts => queryWithDate(opts.instanceName, opts.databaseName, opts.projectId) - ) - .command( - 'queryWithFloat ', - "Query data from the sample 'Venues' table with a FLOAT64 datatype.", - {}, - opts => queryWithFloat(opts.instanceName, opts.databaseName, opts.projectId) - ) - .command( - 'queryWithInt ', - "Query data from the sample 'Venues' table with a INT64 datatype.", - {}, - opts => queryWithInt(opts.instanceName, opts.databaseName, opts.projectId) - ) - .command( - 'queryWithString ', - "Query data from the sample 'Venues' table with a STRING datatype.", - {}, - opts => - queryWithString(opts.instanceName, opts.databaseName, opts.projectId) - ) - .command( - 'queryWithTimestamp ', - "Query data from the sample 'Venues' table with a TIMESTAMP datatype.", - {}, - opts => - queryWithTimestamp(opts.instanceName, opts.databaseName, opts.projectId) - ) .command( 'addNumericColumn ', 'Adds a "Revenue" column to sample "Venues" table in a Cloud Spanner database.', @@ -687,77 +85,19 @@ require('yargs') opts => addNumericColumn(opts.instanceName, opts.databaseName, opts.projectId) ) - .command( - 'updateWithNumericData ', - 'Updates rows to include "Revenue" in sample "Venues" Cloud Spanner table.', - {}, - opts => - 
updateWithNumericData( - opts.instanceName, - opts.databaseName, - opts.projectId - ) - ) - .command( - 'queryWithNumericParameter ', - "Query data from the sample 'Venues' table with a NUMERIC datatype.", - {}, - opts => - queryWithNumericParameter( - opts.instanceName, - opts.databaseName, - opts.projectId - ) - ) .command( 'addJsonColumn ', 'Adds a "VenueDetails" column to sample "Venues" table in a Cloud Spanner database.', {}, opts => addJsonColumn(opts.instanceName, opts.databaseName, opts.projectId) ) - .command( - 'updateWithJsonData ', - 'Updates rows to include "VenueDetails" in sample "Venues" Cloud Spanner table.', - {}, - opts => - updateWithJsonData(opts.instanceName, opts.databaseName, opts.projectId) - ) - .command( - 'queryWithJsonParameter ', - "Query data from the sample 'Venues' table with a JSON datatype.", - {}, - opts => - queryWithJsonParameter( - opts.instanceName, - opts.databaseName, - opts.projectId - ) - ) .example( 'node $0 createVenuesTable "my-instance" "my-database" "my-project-id"' ) - .example('node $0 insertData "my-instance" "my-database" "my-project-id"') - .example('node $0 queryWithArray "my-instance" "my-database" "my-project-id"') - .example('node $0 queryWithBool "my-instance" "my-database" "my-project-id"') - .example('node $0 queryWithBytes "my-instance" "my-database" "my-project-id"') - .example('node $0 queryWithDate "my-instance" "my-database" "my-project-id"') - .example('node $0 queryWithFloat "my-instance" "my-database" "my-project-id"') - .example('node $0 queryWithInt "my-instance" "my-database" "my-project-id"') - .example( - 'node $0 queryWithString "my-instance" "my-database" "my-project-id"' - ) - .example( - 'node $0 queryWithTimestamp "my-instance" "my-database" "my-project-id"' - ) .example( 'node $0 addNumericColumn "my-instance" "my-database" "my-project-id"' ) - .example( - 'node $0 updateWithNumericData "my-instance" "my-database" "my-project-id"' - ) - .example( - 'node $0 queryWithNumericParameter "my-instance" "my-database" "my-project-id"' - ) + .example('node $0 addJsonColumn "my-instance" "my-database" "my-project-id"') .wrap(120) .recommendCommands() .epilogue('For more information, see https://cloud.google.com/spanner/docs') diff --git a/samples/dml.js b/samples/dml.js index e51c89cba..dface9327 100644 --- a/samples/dml.js +++ b/samples/dml.js @@ -288,21 +288,6 @@ function updateUsingDmlWithStruct(instanceId, databaseId, projectId) { params: { name: nameStruct, }, - types: { - name: { - type: 'struct', - fields: [ - { - name: 'FirstName', - type: 'string', - }, - { - name: 'LastName', - type: 'string', - }, - ], - }, - }, }); console.log(`Successfully updated ${rowCount} record.`); diff --git a/samples/enable-fine-grained-access.js b/samples/enable-fine-grained-access.js index c49318b65..0b333f4d1 100644 --- a/samples/enable-fine-grained-access.js +++ b/samples/enable-fine-grained-access.js @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
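A note on the pattern used throughout the remaining hunks: the samples move from the resource-object API (spanner.instance(), instance.database(), database.updateSchema(), database.getIamPolicy()) to the autogenerated admin clients exposed by the Spanner handle. The sketch below shows the shared setup and teardown the updated samples rely on; the project, instance, and database IDs are placeholders.

// Minimal sketch (placeholder IDs assumed): obtain the admin clients from the
// Spanner handle and build fully-qualified resource names with the path helpers.
const {Spanner} = require('@google-cloud/spanner');

const spanner = new Spanner({projectId: 'my-project-id'});

// Autogenerated admin clients used throughout the updated samples.
const databaseAdminClient = spanner.getDatabaseAdminClient();
const instanceAdminClient = spanner.getInstanceAdminClient();

// Path helpers produce names such as
// projects/my-project-id/instances/my-instance/databases/my-database.
const databaseName = databaseAdminClient.databasePath(
  'my-project-id',
  'my-instance',
  'my-database'
);
console.log(databaseName);

// The admin clients do not need to be closed explicitly; closing the Spanner
// client also closes them.
spanner.close();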
@@ -37,7 +37,7 @@ function main( // databaseRole = 'parent'; // title = 'condition title'; // Imports the Google Cloud Spanner client library - const {Spanner} = require('@google-cloud/spanner'); + const {Spanner, protos} = require('@google-cloud/spanner'); // Instantiates a client const spanner = new Spanner({ @@ -45,11 +45,19 @@ function main( }); async function enableFineGrainedAccess() { - // Gets a reference to a Cloud Spanner instance and database. - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + // Gets a reference to a Cloud Spanner Database Admin Client object + const databaseAdminClient = spanner.getDatabaseAdminClient(); - const [policy] = await database.getIamPolicy({requestedPolicyVersion: 3}); + const [policy] = await databaseAdminClient.getIamPolicy({ + resource: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + options: (protos.google.iam.v1.GetPolicyOptions = { + requestedPolicyVersion: 3, + }), + }); if (policy.version < 3) { policy.version = 3; } @@ -63,10 +71,26 @@ function main( }, }; policy.bindings.push(newBinding); - await database.setIamPolicy({policy: policy}); + await databaseAdminClient.setIamPolicy({ + resource: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + policy: policy, + }); // Requested Policy Version is Optional. The maximum policy version that will be used to format the policy. // Valid values are 0, 1, and 3. Requests specifying an invalid value will be rejected. - const newPolicy = await database.getIamPolicy({requestedPolicyVersion: 3}); + const newPolicy = await databaseAdminClient.getIamPolicy({ + resource: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + options: (protos.google.iam.v1.GetPolicyOptions = { + requestedPolicyVersion: 3, + }), + }); console.log(newPolicy); } enableFineGrainedAccess(); diff --git a/samples/get-database-roles.js b/samples/get-database-roles.js index 99a4451e2..a150a6f85 100644 --- a/samples/get-database-roles.js +++ b/samples/get-database-roles.js @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -30,22 +30,33 @@ function main( // const instanceId = 'my-instance'; // const databaseId = 'my-database'; // const projectId = 'my-project-id'; - // Imports the Google Cloud Spanner client library + + // Imports the Google Cloud client library const {Spanner} = require('@google-cloud/spanner'); - // Instantiates a client + // creates a client const spanner = new Spanner({ projectId: projectId, }); - async function getDatabaseRoles() { - // Gets a reference to a Cloud Spanner instance and database. 
- const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + const databaseAdminClient = spanner.getDatabaseAdminClient(); + async function getDatabaseRoles() { // Fetching database roles - const [databaseRoles] = await database.getDatabaseRoles(); - console.log(`Roles for Database: ${database.formattedName_}`); + const [databaseRoles] = await databaseAdminClient.listDatabaseRoles({ + parent: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + }); + console.log( + `Roles for Database: ${databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + )}` + ); databaseRoles.forEach(role => { console.log(`Role: ${role.name}`); }); diff --git a/samples/get-instance-config.js b/samples/get-instance-config.js index b769e41c9..ce6a70b76 100644 --- a/samples/get-instance-config.js +++ b/samples/get-instance-config.js @@ -1,5 +1,5 @@ /** - * Copyright 2021 Google LLC + * Copyright 2024 Google LLC * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at @@ -15,12 +15,13 @@ // sample-metadata: // title: Gets the instance config metadata for the configuration nam6 -// usage: node get-instance-config.js +// usage: node get-instance-config.js 'use strict'; function main(projectId) { // [START spanner_get_instance_config] + /** * TODO(developer): Uncomment the following line before running the sample. */ @@ -34,11 +35,15 @@ function main(projectId) { projectId: projectId, }); + const instanceAdminClient = spanner.getInstanceAdminClient(); + async function getInstanceConfig() { // Get the instance config for the multi-region North America 6 (NAM6). // See https://cloud.google.com/spanner/docs/instance-configurations#configuration for a list of all available // configurations. - const [instanceConfig] = await spanner.getInstanceConfig('nam6'); + const [instanceConfig] = await instanceAdminClient.getInstanceConfig({ + name: instanceAdminClient.instanceConfigPath(projectId, 'nam6'), + }); console.log( `Available leader options for instance config ${instanceConfig.name} ('${ instanceConfig.displayName diff --git a/samples/index-create-storing.js b/samples/index-create-storing.js index 72924b874..ec360a274 100644 --- a/samples/index-create-storing.js +++ b/samples/index-create-storing.js @@ -1,4 +1,4 @@ -// Copyright 2021 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
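The schema hunks that follow (index-create-storing.js, index-create.js, and the later DDL samples) all apply the same conversion: database.updateSchema(statements) becomes databaseAdminClient.updateDatabaseDdl() with an explicit database path. A consolidated sketch of that recurring pattern is shown below; the helper name updateSchemaWithAdminClient and the IDs are illustrative, and the database is assumed to already exist.

// Minimal sketch of the recurring DDL-update pattern (placeholder IDs assumed).
const {Spanner} = require('@google-cloud/spanner');

async function updateSchemaWithAdminClient(projectId, instanceId, databaseId) {
  const spanner = new Spanner({projectId});
  const databaseAdminClient = spanner.getDatabaseAdminClient();

  // Any list of DDL statements works here; this one mirrors the index samples.
  const statements = ['CREATE INDEX AlbumsByAlbumTitle ON Albums(AlbumTitle)'];

  try {
    const [operation] = await databaseAdminClient.updateDatabaseDdl({
      database: databaseAdminClient.databasePath(projectId, instanceId, databaseId),
      statements: statements,
    });
    console.log('Waiting for operation to complete...');
    await operation.promise();
    console.log('Schema updated.');
  } catch (err) {
    console.error('ERROR:', err);
  } finally {
    // Closing the Spanner client also closes the admin client.
    spanner.close();
  }
}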
@@ -31,26 +31,31 @@ function main( // const databaseId = 'my-database'; // const projectId = 'my-project-id'; - // Imports the Google Cloud Spanner client library + // Imports the Google Cloud client library const {Spanner} = require('@google-cloud/spanner'); - // Instantiates a client + // creates a client const spanner = new Spanner({ projectId: projectId, }); - async function createStoringIndex() { - // Gets a reference to a Cloud Spanner instance and database - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + const databaseAdminClient = spanner.getDatabaseAdminClient(); + async function createStoringIndex() { const request = [ 'CREATE INDEX AlbumsByAlbumTitle2 ON Albums(AlbumTitle) STORING (MarketingBudget)', ]; // Creates a new index in the database try { - const [operation] = await database.updateSchema(request); + const [operation] = await databaseAdminClient.updateDatabaseDdl({ + database: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + statements: request, + }); console.log('Waiting for operation to complete...'); await operation.promise(); @@ -59,8 +64,9 @@ function main( } catch (err) { console.error('ERROR:', err); } finally { - // Close the database when finished. - database.close(); + // Close the spanner client when finished. + // The databaseAdminClient does not require explicit closure. The closure of the Spanner client will automatically close the databaseAdminClient. + spanner.close(); } } createStoringIndex(); diff --git a/samples/index-create.js b/samples/index-create.js index b220e8991..f0cc17163 100644 --- a/samples/index-create.js +++ b/samples/index-create.js @@ -1,4 +1,4 @@ -// Copyright 2021 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -31,24 +31,29 @@ function main( // const databaseId = 'my-database'; // const projectId = 'my-project-id'; - // Imports the Google Cloud Spanner client library + // Imports the Google Cloud client library const {Spanner} = require('@google-cloud/spanner'); - // Instantiates a client + // creates a client const spanner = new Spanner({ projectId: projectId, }); - async function createIndex() { - // Gets a reference to a Cloud Spanner instance and database - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + const databaseAdminClient = spanner.getDatabaseAdminClient(); + async function createIndex() { const request = ['CREATE INDEX AlbumsByAlbumTitle ON Albums(AlbumTitle)']; // Creates a new index in the database try { - const [operation] = await database.updateSchema(request); + const [operation] = await databaseAdminClient.updateDatabaseDdl({ + database: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + statements: request, + }); console.log('Waiting for operation to complete...'); await operation.promise(); @@ -57,8 +62,9 @@ function main( } catch (err) { console.error('ERROR:', err); } finally { - // Close the database when finished. - database.close(); + // Close the spanner client when finished. + // The databaseAdminClient does not require explicit closure. The closure of the Spanner client will automatically close the databaseAdminClient. 
+ spanner.close(); } } createIndex(); diff --git a/samples/instance-config-create.js b/samples/instance-config-create.js index 7ae6cccff..95686b0d7 100644 --- a/samples/instance-config-create.js +++ b/samples/instance-config-create.js @@ -1,5 +1,5 @@ /** - * Copyright 2022 Google LLC + * Copyright 2024 Google LLC * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at @@ -40,22 +40,44 @@ function main( const spanner = new Spanner({ projectId: projectId, }); + + const instanceAdminClient = spanner.getInstanceAdminClient(); + + // Creates a new instance config async function createInstanceConfig() { - // Creates a new instance config - const instanceConfig = spanner.instanceConfig(instanceConfigId); + const [baseInstanceConfig] = await instanceAdminClient.getInstanceConfig({ + name: instanceAdminClient.instanceConfigPath( + projectId, + baseInstanceConfigId + ), + }); try { - const [baseInstanceConfig] = - await spanner.getInstanceConfig(baseInstanceConfigId); - console.log(`Creating instance config ${instanceConfig.formattedName_}.`); - const [, operation] = await instanceConfig.create({ - displayName: instanceConfigId, - baseConfig: baseInstanceConfig.name, - replicas: baseInstanceConfig.replicas.concat( - baseInstanceConfig.optionalReplicas[0] - ), + console.log( + `Creating instance config ${instanceAdminClient.instanceConfigPath( + projectId, + instanceConfigId + )}.` + ); + const [operation] = await instanceAdminClient.createInstanceConfig({ + instanceConfigId: instanceConfigId, + parent: instanceAdminClient.projectPath(projectId), + instanceConfig: { + name: instanceAdminClient.instanceConfigPath( + projectId, + instanceConfigId + ), + baseConfig: instanceAdminClient.instanceConfigPath( + projectId, + baseInstanceConfigId + ), + displayName: instanceConfigId, + replicas: baseInstanceConfig.replicas.concat( + baseInstanceConfig.optionalReplicas[0] + ), + }, }); console.log( - `Waiting for create operation for ${instanceConfig.id} to complete...` + `Waiting for create operation for ${instanceConfigId} to complete...` ); await operation.promise(); console.log(`Created instance config ${instanceConfigId}.`); diff --git a/samples/instance-config-delete.js b/samples/instance-config-delete.js index d28bec969..a56dab6a8 100644 --- a/samples/instance-config-delete.js +++ b/samples/instance-config-delete.js @@ -1,5 +1,5 @@ /** - * Copyright 2022 Google LLC + * Copyright 2024 Google LLC * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at @@ -24,7 +24,6 @@ function main( projectId = 'my-project-id' ) { // [START spanner_delete_instance_config] - /** * TODO(developer): Uncomment the following lines before running the sample. */ @@ -38,24 +37,22 @@ function main( const spanner = new Spanner({ projectId: projectId, }); + + const instanceAdminClient = spanner.getInstanceAdminClient(); + async function deleteInstanceConfig() { // Deletes an instance config. - const instanceConfig = spanner.instanceConfig(instanceConfigId); + try { // Delete the instance config. 
-      console.log(`Deleting ${instanceConfig.id}...\n`);
-      await instanceConfig.delete();
-      // Verify that the instance config no longer exists
-      const exists = await instanceConfig.exists();
-      if (exists) {
-        console.error(
-          'Error: Instance config ',
-          instanceConfigId,
-          ' still exists'
-        );
-      } else {
-        console.log(`Deleted instance config ${instanceConfigId}.\n`);
-      }
+      console.log(`Deleting ${instanceConfigId}...\n`);
+      await instanceAdminClient.deleteInstanceConfig({
+        name: instanceAdminClient.instanceConfigPath(
+          projectId,
+          instanceConfigId
+        ),
+      });
+      console.log(`Deleted instance config ${instanceConfigId}.\n`);
     } catch (err) {
       console.error(
         'ERROR: Deleting instance config ',
@@ -74,3 +71,5 @@ process.on('unhandledRejection', err => {
   process.exitCode = 1;
 });
 main(...process.argv.slice(2));
+
+/* TODO: re-add a check that the instance config no longer exists after deletion. */
diff --git a/samples/instance-config-get-operations.js b/samples/instance-config-get-operations.js
index b62e0a439..4737ff40e 100644
--- a/samples/instance-config-get-operations.js
+++ b/samples/instance-config-get-operations.js
@@ -1,5 +1,5 @@
 /**
- * Copyright 2022 Google LLC
+ * Copyright 2024 Google LLC
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
@@ -34,6 +34,9 @@ function main(projectId = 'my-project-id') {
   const spanner = new Spanner({
     projectId: projectId,
   });
+
+  const instanceAdminClient = spanner.getInstanceAdminClient();
+
   async function getInstanceConfigOperations() {
     // Lists the instance config operations.
     try {
@@ -41,7 +44,9 @@ function main(projectId = 'my-project-id') {
         `Getting list of instance config operations on project ${projectId}...\n`
       );
       const [instanceConfigOperations] =
-        await spanner.getInstanceConfigOperations({
+        await instanceAdminClient.listInstanceConfigOperations({
+          parent: instanceAdminClient.projectPath(projectId),
+          // This filter ensures that only operations with metadata of type CreateInstanceConfigMetadata are returned.
           filter:
             '(metadata.@type=type.googleapis.com/google.spanner.admin.instance.v1.CreateInstanceConfigMetadata)',
         });
diff --git a/samples/instance-config-update.js b/samples/instance-config-update.js
index acb7dc237..59b8bae46 100644
--- a/samples/instance-config-update.js
+++ b/samples/instance-config-update.js
@@ -1,5 +1,5 @@
 /**
- * Copyright 2022 Google LLC
+ * Copyright 2024 Google LLC
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at @@ -32,27 +32,44 @@ function main( // const projectId = 'my-project-id'; // Imports the Google Cloud client library - const {Spanner} = require('@google-cloud/spanner'); + const {Spanner, protos} = require('@google-cloud/spanner'); // Creates a client const spanner = new Spanner({ projectId: projectId, }); + + const instanceAdminClient = spanner.getInstanceAdminClient(); + async function updateInstanceConfig() { // Updates an instance config - const instanceConfig = spanner.instanceConfig(instanceConfigId); try { - console.log(`Updating instance config ${instanceConfig.formattedName_}.`); - const [operation] = await instanceConfig.setMetadata({ + console.log( + `Updating instance config ${instanceAdminClient.instanceConfigPath( + projectId, + instanceConfigId + )}.` + ); + const [operation] = await instanceAdminClient.updateInstanceConfig({ instanceConfig: { + name: instanceAdminClient.instanceConfigPath( + projectId, + instanceConfigId + ), displayName: 'updated custom instance config', labels: { updated: 'true', + created: Math.round(Date.now() / 1000).toString(), // current time }, }, + // Field mask specifying fields that should get updated in InstanceConfig + // Only display_name and labels can be updated + updateMask: (protos.google.protobuf.FieldMask = { + paths: ['display_name', 'labels'], + }), }); console.log( - `Waiting for update operation for ${instanceConfig.id} to complete...` + `Waiting for update operation for ${instanceConfigId} to complete...` ); await operation.promise(); console.log(`Updated instance config ${instanceConfigId}.`); diff --git a/samples/instance-with-processing-units.js b/samples/instance-with-processing-units.js index 8d47e87d3..1c4e295c5 100644 --- a/samples/instance-with-processing-units.js +++ b/samples/instance-with-processing-units.js @@ -1,5 +1,5 @@ /** - * Copyright 2021 Google LLC + * Copyright 2024 Google LLC * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at @@ -17,6 +17,7 @@ async function createInstanceWithProcessingUnits(instanceId, projectId) { // [START spanner_create_instance_with_processing_units] + // Imports the Google Cloud client library const {Spanner} = require('@google-cloud/spanner'); @@ -31,27 +32,38 @@ async function createInstanceWithProcessingUnits(instanceId, projectId) { projectId: projectId, }); - const instance = spanner.instance(instanceId); + const instanceAdminClient = spanner.getInstanceAdminClient(); // Creates a new instance try { - console.log(`Creating instance ${instance.formattedName_}.`); - const [, operation] = await instance.create({ - config: 'regional-us-central1', - processingUnits: 500, - displayName: 'This is a display name.', - labels: { - ['cloud_spanner_samples']: 'true', + console.log( + `Creating instance ${instanceAdminClient.instancePath( + projectId, + instanceId + )}.` + ); + const [operation] = await instanceAdminClient.createInstance({ + instanceId: instanceId, + instance: { + config: instanceAdminClient.instanceConfigPath( + projectId, + 'regional-us-central1' + ), + displayName: 'Display name for the instance.', + processingUnits: 500, + labels: { + cloud_spanner_samples: 'true', + created: Math.round(Date.now() / 1000).toString(), // current time + }, }, + parent: instanceAdminClient.projectPath(projectId), }); - console.log(`Waiting for operation on ${instance.id} to complete...`); + console.log(`Waiting for operation on ${instanceId} to complete...`); await operation.promise(); - console.log(`Created instance ${instanceId}.`); - - const [metadata] = await instance.getMetadata({ - fieldNames: ['processingUnits'], + const [metadata] = await instanceAdminClient.getInstance({ + name: instanceAdminClient.instancePath(projectId, instanceId), }); console.log( `Instance ${instanceId} has ${metadata.processingUnits} ` + diff --git a/samples/instance.js b/samples/instance.js index cd9281488..9098abb67 100644 --- a/samples/instance.js +++ b/samples/instance.js @@ -1,5 +1,5 @@ /** - * Copyright 2020 Google LLC + * Copyright 2024 Google LLC * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at @@ -15,38 +15,51 @@ 'use strict'; +// creates an instance using Instance Admin Client async function createInstance(instanceId, projectId) { // [START spanner_create_instance] + // Imports the Google Cloud client library const {Spanner} = require('@google-cloud/spanner'); - /** - * TODO(developer): Uncomment the following lines before running the sample. - */ - // const projectId = 'my-project-id'; - // const instanceId = 'my-instance'; - // Creates a client const spanner = new Spanner({ projectId: projectId, }); - const instance = spanner.instance(instanceId); + const instanceAdminClient = await spanner.getInstanceAdminClient(); + /** + * TODO(developer): Uncomment the following lines before running the sample. 
+ **/ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; // Creates a new instance try { - console.log(`Creating instance ${instance.formattedName_}.`); - const [, operation] = await instance.create({ - config: 'regional-us-west1', - nodes: 1, - displayName: 'This is a display name.', - labels: { - ['cloud_spanner_samples']: 'true', - created: Math.round(Date.now() / 1000).toString(), // current time + console.log( + `Creating instance ${instanceAdminClient.instancePath( + projectId, + instanceId + )}.` + ); + const [operation] = await instanceAdminClient.createInstance({ + instanceId: instanceId, + parent: instanceAdminClient.projectPath(projectId), + instance: { + config: instanceAdminClient.instanceConfigPath( + projectId, + 'regional-us-central1' + ), + nodeCount: 1, + displayName: 'Display name for the instance.', + labels: { + cloud_spanner_samples: 'true', + created: Math.round(Date.now() / 1000).toString(), // current time + }, }, }); - console.log(`Waiting for operation on ${instance.id} to complete...`); + console.log(`Waiting for operation on ${instanceId} to complete...`); await operation.promise(); console.log(`Created instance ${instanceId}.`); @@ -64,7 +77,7 @@ require('yargs') .demand(1) .command( 'createInstance ', - 'Creates an example instance in a Cloud Spanner instance.', + 'Creates an example instance in a Cloud Spanner instance using Instance Admin Client.', {}, opts => createInstance(opts.instanceName, opts.projectId) ) diff --git a/samples/json-add-column.js b/samples/json-add-column.js index 0eed31d0f..1ce0331b1 100644 --- a/samples/json-add-column.js +++ b/samples/json-add-column.js @@ -1,4 +1,4 @@ -// Copyright 2021 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -16,8 +16,6 @@ async function addJsonColumn(instanceId, databaseId, projectId) { // [START spanner_add_json_column] - // Imports the Google Cloud client library. - const {Spanner} = require('@google-cloud/spanner'); /** * TODO(developer): Uncomment the following lines before running the sample. @@ -26,19 +24,27 @@ async function addJsonColumn(instanceId, databaseId, projectId) { // const instanceId = 'my-instance'; // const databaseId = 'my-database'; - // Creates a client + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + // creates a client const spanner = new Spanner({ projectId: projectId, }); - // Gets a reference to a Cloud Spanner instance. - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + const databaseAdminClient = spanner.getDatabaseAdminClient(); const request = ['ALTER TABLE Venues ADD COLUMN VenueDetails JSON']; // Alter existing table to add a column. 
- const [operation] = await database.updateSchema(request); + const [operation] = await databaseAdminClient.updateDatabaseDdl({ + database: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + statements: request, + }); console.log(`Waiting for operation on ${databaseId} to complete...`); diff --git a/samples/list-databases.js b/samples/list-databases.js index 642600ff6..e24dbae6f 100644 --- a/samples/list-databases.js +++ b/samples/list-databases.js @@ -1,5 +1,5 @@ /** - * Copyright 2021 Google LLC + * Copyright 2024 Google LLC * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at @@ -30,22 +30,24 @@ function main(instanceId, projectId) { // Imports the Google Cloud client library const {Spanner} = require('@google-cloud/spanner'); - // Creates a client + // creates a client const spanner = new Spanner({ projectId: projectId, }); - // Gets a reference to a Cloud Spanner instance - const instance = spanner.instance(instanceId); + + const databaseAdminClient = spanner.getDatabaseAdminClient(); async function listDatabases() { // Lists all databases on the instance. - const [databases] = await instance.getDatabases(); + const [databases] = await databaseAdminClient.listDatabases({ + parent: databaseAdminClient.instancePath(projectId, instanceId), + }); console.log(`Databases for projects/${projectId}/instances/${instanceId}:`); databases.forEach(database => { - const defaultLeader = database.metadata.defaultLeader - ? `(default leader = ${database.metadata.defaultLeader})` + const defaultLeader = database.defaultLeader + ? `(default leader = ${database.defaultLeader})` : ''; - console.log(`\t${database.id} ${defaultLeader}`); + console.log(`\t${database.name} ${defaultLeader}`); }); } listDatabases(); diff --git a/samples/list-instance-configs.js b/samples/list-instance-configs.js index 7171acf80..27017f38e 100644 --- a/samples/list-instance-configs.js +++ b/samples/list-instance-configs.js @@ -1,5 +1,5 @@ /** - * Copyright 2021 Google LLC + * Copyright 2024 Google LLC * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at @@ -34,11 +34,15 @@ function main(projectId) { projectId: projectId, }); + const instanceAdminClient = spanner.getInstanceAdminClient(); + async function listInstanceConfigs() { // Lists all available instance configurations in the project. // See https://cloud.google.com/spanner/docs/instance-configurations#configuration for a list of all available // configurations. - const [instanceConfigs] = await spanner.getInstanceConfigs(); + const [instanceConfigs] = await instanceAdminClient.listInstanceConfigs({ + parent: instanceAdminClient.projectPath(projectId), + }); console.log(`Available instance configs for project ${projectId}:`); instanceConfigs.forEach(instanceConfig => { console.log( diff --git a/samples/numeric-add-column.js b/samples/numeric-add-column.js index 4ab81a0ea..85a280e1d 100644 --- a/samples/numeric-add-column.js +++ b/samples/numeric-add-column.js @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -16,8 +16,6 @@ async function addNumericColumn(instanceId, databaseId, projectId) { // [START spanner_add_numeric_column] - // Imports the Google Cloud client library. - const {Spanner} = require('@google-cloud/spanner'); /** * TODO(developer): Uncomment the following lines before running the sample. @@ -26,19 +24,27 @@ async function addNumericColumn(instanceId, databaseId, projectId) { // const instanceId = 'my-instance'; // const databaseId = 'my-database'; - // Creates a client + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + // creates a client const spanner = new Spanner({ projectId: projectId, }); - // Gets a reference to a Cloud Spanner instance. - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + const databaseAdminClient = spanner.getDatabaseAdminClient(); const request = ['ALTER TABLE Venues ADD COLUMN Revenue NUMERIC']; // Alter existing table to add a column. - const [operation] = await database.updateSchema(request); + const [operation] = await databaseAdminClient.updateDatabaseDdl({ + database: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + statements: request, + }); console.log(`Waiting for operation on ${databaseId} to complete...`); diff --git a/samples/package.json b/samples/package.json index fe533f032..6f412fcfd 100644 --- a/samples/package.json +++ b/samples/package.json @@ -16,7 +16,7 @@ "dependencies": { "@google-cloud/kms": "^4.0.0", "@google-cloud/precise-date": "^4.0.0", - "@google-cloud/spanner": "^7.4.0", + "@google-cloud/spanner": "^7.5.0", "yargs": "^17.0.0" }, "devDependencies": { diff --git a/samples/pg-add-column.js b/samples/pg-add-column.js index a64ef4872..dc8daceb4 100644 --- a/samples/pg-add-column.js +++ b/samples/pg-add-column.js @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -31,22 +31,28 @@ function main( // const databaseId = 'my-database'; // const projectId = 'my-project-id'; - // Imports the Google Cloud Spanner client library + // Imports the Google Cloud client library const {Spanner} = require('@google-cloud/spanner'); - // Instantiates a client + // creates a client const spanner = new Spanner({ projectId: projectId, }); - async function pgAddColumn() { - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + const databaseAdminClient = spanner.getDatabaseAdminClient(); + async function pgAddColumn() { const request = ['ALTER TABLE Albums ADD COLUMN MarketingBudget BIGINT']; // Alter existing table to add a column. - const [operation] = await database.updateSchema(request); + const [operation] = await databaseAdminClient.updateDatabaseDdl({ + database: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + statements: request, + }); console.log(`Waiting for operation on ${databaseId} to complete...`); diff --git a/samples/pg-database-create.js b/samples/pg-database-create.js index 4eb5abc7d..7f59d4a72 100644 --- a/samples/pg-database-create.js +++ b/samples/pg-database-create.js @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -31,35 +31,33 @@ function main(
   // const databaseId = 'my-database';
   // const projectId = 'my-project-id';
-  // Imports the Google Cloud Spanner client library
-  const {Spanner} = require('@google-cloud/spanner');
+  // Imports the Google Cloud client library
+  const {Spanner, protos} = require('@google-cloud/spanner');
-  // Instantiates a client
+  // creates a client
   const spanner = new Spanner({
     projectId: projectId,
   });
-  async function createPgDatabase() {
-    // Gets a reference to a Cloud Spanner instance
-    const instance = spanner.instance(instanceId);
-
-    // Set Dialect as PostgreSQL
-    const request = {
-      databaseDialect: Spanner.POSTGRESQL,
-    };
+  const databaseAdminClient = spanner.getDatabaseAdminClient();
+  async function createPgDatabase() {
     // Creates a PostgreSQL database. PostgreSQL create requests may not contain any additional
     // DDL statements. We need to execute these separately after the database has been created.
-    const [database, operationCreate] = await instance.createDatabase(
-      databaseId,
-      request
-    );
+    const [operationCreate] = await databaseAdminClient.createDatabase({
+      createStatement: 'CREATE DATABASE "' + databaseId + '"',
+      parent: databaseAdminClient.instancePath(projectId, instanceId),
+      databaseDialect:
+        protos.google.spanner.admin.database.v1.DatabaseDialect.POSTGRESQL,
+    });
-    console.log(`Waiting for operation on ${database.id} to complete...`);
+    console.log(`Waiting for operation on ${databaseId} to complete...`);
     await operationCreate.promise();
-    await database.getMetadata();
+    const [metadata] = await databaseAdminClient.getDatabase({
+      name: databaseAdminClient.databasePath(projectId, instanceId, databaseId),
+    });
     console.log(
-      `Created database ${databaseId} on instance ${instanceId} with dialect ${database.metadata.databaseDialect}.`
+      `Created database ${databaseId} on instance ${instanceId} with dialect ${metadata.databaseDialect}.`
     );
     // Create a couple of tables using a separate request. We must use PostgreSQL style DDL as the
@@ -80,7 +78,14 @@
       PRIMARY KEY (AlbumId)
       );`,
     ];
-    const [operationUpdateDDL] = await database.updateSchema(statements);
+    const [operationUpdateDDL] = await databaseAdminClient.updateDatabaseDdl({
+      database: databaseAdminClient.databasePath(
+        projectId,
+        instanceId,
+        databaseId
+      ),
+      statements: statements,
+    });
     await operationUpdateDDL.promise();
     console.log('Updated schema');
   }
diff --git a/samples/pg-index-create-storing.js b/samples/pg-index-create-storing.js
index 878745ab3..c7130cd26 100644
--- a/samples/pg-index-create-storing.js
+++ b/samples/pg-index-create-storing.js
@@ -1,4 +1,4 @@
-// Copyright 2022 Google LLC
+// Copyright 2024 Google LLC
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -31,26 +31,31 @@ function main( // const databaseId = 'my-database'; // const projectId = 'my-project-id'; - // Imports the Google Cloud Spanner client library + // Imports the Google Cloud client library const {Spanner} = require('@google-cloud/spanner'); - // Instantiates a client + // creates a client const spanner = new Spanner({ projectId: projectId, }); - async function pgCreateStoringIndex() { - // Gets a reference to a Cloud Spanner instance and database - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + const databaseAdminClient = spanner.getDatabaseAdminClient(); + async function pgCreateStoringIndex() { const request = [ 'CREATE INDEX AlbumsByAlbumTitle ON Albums(AlbumTitle) INCLUDE(MarketingBudget)', ]; // Creates a new index in the database try { - const [operation] = await database.updateSchema(request); + const [operation] = await databaseAdminClient.updateDatabaseDdl({ + database: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + statements: request, + }); console.log('Waiting for operation to complete...'); await operation.promise(); @@ -59,8 +64,9 @@ function main( } catch (err) { console.error('ERROR:', err); } finally { - // Close the database when finished. - database.close(); + // Close the spanner client when finished. + // The databaseAdminClient does not require explicit closure. The closure of the Spanner client will automatically close the databaseAdminClient. + spanner.close(); } } pgCreateStoringIndex(); diff --git a/samples/pg-interleaving.js b/samples/pg-interleaving.js index c61a594b6..efda66436 100644 --- a/samples/pg-interleaving.js +++ b/samples/pg-interleaving.js @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -30,19 +30,18 @@ function main( // const instanceId = 'my-instance'; // const databaseId = 'my-database'; // const projectId = 'my-project-id'; - // Imports the Google Cloud Spanner client library + + // Imports the Google Cloud client library const {Spanner} = require('@google-cloud/spanner'); - // Instantiates a client + // creates a client const spanner = new Spanner({ projectId: projectId, }); - async function pgInterleaving() { - // Gets a reference to a Cloud Spanner instance and database. - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + const databaseAdminClient = spanner.getDatabaseAdminClient(); + async function pgInterleaving() { const statements = [ `CREATE TABLE Author (AuthorId bigint NOT NULL, @@ -60,7 +59,14 @@ function main( ]; // Updates schema by adding new tables. - const [operation] = await database.updateSchema(statements); + const [operation] = await databaseAdminClient.updateDatabaseDdl({ + database: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + statements: statements, + }); console.log(`Waiting for operation on ${databaseId} to complete...`); await operation.promise(); diff --git a/samples/pg-jsonb-add-column.js b/samples/pg-jsonb-add-column.js index d41ffd451..358ffa6be 100644 --- a/samples/pg-jsonb-add-column.js +++ b/samples/pg-jsonb-add-column.js @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -40,14 +40,20 @@ function main( }); async function pgJsonbAddColumn() { - // Gets a reference to a Cloud Spanner instance and database. - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + // Gets a reference to a Cloud Spanner Database Admin Client object + const databaseAdminClient = spanner.getDatabaseAdminClient(); const request = ['ALTER TABLE Venues ADD COLUMN VenueDetails JSONB']; // Updates schema by adding a new table. - const [operation] = await database.updateSchema(request); + const [operation] = await databaseAdminClient.updateDatabaseDdl({ + database: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + statements: request, + }); console.log(`Waiting for operation on ${databaseId} to complete...`); await operation.promise(); console.log( diff --git a/samples/pg-sequence-alter.js b/samples/pg-sequence-alter.js index 9992cb3b5..6a89339e5 100644 --- a/samples/pg-sequence-alter.js +++ b/samples/pg-sequence-alter.js @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -36,14 +36,20 @@ async function main(instanceId, databaseId, projectId) { }); async function alterSequence(instanceId, databaseId) { - // Gets a reference to a Cloud Spanner instance and database - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + // Gets a reference to a Cloud Spanner Database Admin Client object + const databaseAdminClient = spanner.getDatabaseAdminClient(); const request = ['ALTER SEQUENCE Seq SKIP RANGE 1000 5000000']; try { - const [operation] = await database.updateSchema(request); + const [operation] = await databaseAdminClient.updateDatabaseDdl({ + database: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + statements: request, + }); console.log('Waiting for operation to complete...'); await operation.promise(); @@ -54,6 +60,11 @@ async function main(instanceId, databaseId, projectId) { } catch (err) { console.error('ERROR:', err); } + + // Gets a reference to a Cloud Spanner instance and database + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + database.runTransaction(async (err, transaction) => { if (err) { console.error(err); @@ -79,8 +90,9 @@ async function main(instanceId, databaseId, projectId) { } catch (err) { console.error('ERROR:', err); } finally { - // Close the database when finished. - await database.close(); + // Close the spanner client when finished. + // The databaseAdminClient does not require explicit closure. The closure of the Spanner client will automatically close the databaseAdminClient. 
+ spanner.close(); } }); } diff --git a/samples/pg-sequence-create.js b/samples/pg-sequence-create.js index 4b3077d7d..7bbe34888 100644 --- a/samples/pg-sequence-create.js +++ b/samples/pg-sequence-create.js @@ -36,9 +36,8 @@ async function main(instanceId, databaseId, projectId) { }); async function createSequence(instanceId, databaseId) { - // Gets a reference to a Cloud Spanner instance and database - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + // Gets a reference to a Cloud Spanner Database Admin Client object + const databaseAdminClient = spanner.getDatabaseAdminClient(); const request = [ 'CREATE SEQUENCE Seq BIT_REVERSED_POSITIVE', @@ -47,7 +46,14 @@ async function main(instanceId, databaseId, projectId) { // Creates a new table with sequence try { - const [operation] = await database.updateSchema(request); + const [operation] = await databaseAdminClient.updateDatabaseDdl({ + database: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + statements: request, + }); console.log('Waiting for operation to complete...'); await operation.promise(); @@ -58,6 +64,11 @@ async function main(instanceId, databaseId, projectId) { } catch (err) { console.error('ERROR:', err); } + + // Gets a reference to a Cloud Spanner instance and database + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + database.runTransaction(async (err, transaction) => { if (err) { console.error(err); @@ -83,8 +94,9 @@ async function main(instanceId, databaseId, projectId) { } catch (err) { console.error('ERROR:', err); } finally { - // Close the database when finished. - await database.close(); + // Close the spanner client when finished. + // The databaseAdminClient does not require explicit closure. The closure of the Spanner client will automatically close the databaseAdminClient. + spanner.close(); } }); } diff --git a/samples/pg-sequence-drop.js b/samples/pg-sequence-drop.js index 3cdbfb1f2..760575b4a 100644 --- a/samples/pg-sequence-drop.js +++ b/samples/pg-sequence-drop.js @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -36,9 +36,8 @@ async function main(instanceId, databaseId, projectId) { }); async function dropSequence(instanceId, databaseId) { - // Gets a reference to a Cloud Spanner instance and database - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + // Gets a reference to a Cloud Spanner Database Admin Client object + const databaseAdminClient = spanner.getDatabaseAdminClient(); const request = [ 'ALTER TABLE Customers ALTER COLUMN CustomerId DROP DEFAULT', @@ -47,7 +46,14 @@ async function main(instanceId, databaseId, projectId) { // Drop sequence from DDL try { - const [operation] = await database.updateSchema(request); + const [operation] = await databaseAdminClient.updateDatabaseDdl({ + database: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + statements: request, + }); console.log('Waiting for operation to complete...'); await operation.promise(); @@ -58,8 +64,9 @@ async function main(instanceId, databaseId, projectId) { } catch (err) { console.error('ERROR:', err); } finally { - // Close the database when finished. - await database.close(); + // Close the spanner client when finished. 
+ // The databaseAdminClient does not require explicit closure. The closure of the Spanner client will automatically close the databaseAdminClient. + spanner.close(); } } await dropSequence(instanceId, databaseId); diff --git a/samples/schema.js b/samples/schema.js index 59ffc0e2e..9dead0878 100644 --- a/samples/schema.js +++ b/samples/schema.js @@ -1,23 +1,23 @@ -// Copyright 2017 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. +/** + * Copyright 2024 Google LLC + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ 'use strict'; -async function createDatabase(instanceId, databaseId, projectId) { +// creates a database using Database Admin Client +async function createDatabase(instanceID, databaseID, projectID) { // [START spanner_create_database] - // Imports the Google Cloud client library - const {Spanner} = require('@google-cloud/spanner'); /** * TODO(developer): Uncomment the following lines before running the sample. @@ -26,96 +26,55 @@ async function createDatabase(instanceId, databaseId, projectId) { // const instanceId = 'my-instance'; // const databaseId = 'my-database'; - // Creates a client - const spanner = new Spanner({ - projectId: projectId, - }); - - // Gets a reference to a Cloud Spanner instance - const instance = spanner.instance(instanceId); - - // Note: Cloud Spanner interprets Node.js numbers as FLOAT64s, so they - // must be converted to strings before being inserted as INT64s - const request = { - schema: [ - `CREATE TABLE Singers ( - SingerId INT64 NOT NULL, - FirstName STRING(1024), - LastName STRING(1024), - SingerInfo BYTES(MAX), - FullName STRING(2048) AS (ARRAY_TO_STRING([FirstName, LastName], " ")) STORED, - ) PRIMARY KEY (SingerId)`, - `CREATE TABLE Albums ( - SingerId INT64 NOT NULL, - AlbumId INT64 NOT NULL, - AlbumTitle STRING(MAX) - ) PRIMARY KEY (SingerId, AlbumId), - INTERLEAVE IN PARENT Singers ON DELETE CASCADE`, - ], - }; - - // Creates a database - const [database, operation] = await instance.createDatabase( - databaseId, - request - ); - - console.log(`Waiting for operation on ${database.id} to complete...`); - await operation.promise(); - - console.log(`Created database ${databaseId} on instance ${instanceId}.`); - // [END spanner_create_database] -} - -async function addColumn(instanceId, databaseId, projectId) { - // [START spanner_add_column] // Imports the Google Cloud client library const {Spanner} = require('@google-cloud/spanner'); - /** - * TODO(developer): Uncomment the following lines before running the sample. 
- */ - // const projectId = 'my-project-id'; - // const instanceId = 'my-instance'; - // const databaseId = 'my-database'; - - // Creates a client + // creates a client const spanner = new Spanner({ - projectId: projectId, + projectId: projectID, }); - // Gets a reference to a Cloud Spanner instance and database - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); - - const request = ['ALTER TABLE Albums ADD COLUMN MarketingBudget INT64']; - - // Creates a new index in the database + const databaseAdminClient = spanner.getDatabaseAdminClient(); + + const createSingersTableStatement = ` + CREATE TABLE Singers ( + SingerId INT64 NOT NULL, + FirstName STRING(1024), + LastName STRING(1024), + SingerInfo BYTES(MAX) + ) PRIMARY KEY (SingerId)`; + const createAlbumsTableStatement = ` + CREATE TABLE Albums ( + SingerId INT64 NOT NULL, + AlbumId INT64 NOT NULL, + AlbumTitle STRING(MAX) + ) PRIMARY KEY (SingerId, AlbumId), + INTERLEAVE IN PARENT Singers ON DELETE CASCADE`; + + // Creates a new database try { - const [operation] = await database.updateSchema(request); + const [operation] = await databaseAdminClient.createDatabase({ + createStatement: 'CREATE DATABASE `' + databaseID + '`', + extraStatements: [ + createSingersTableStatement, + createAlbumsTableStatement, + ], + parent: databaseAdminClient.instancePath(projectID, instanceID), + }); - console.log('Waiting for operation to complete...'); + console.log(`Waiting for creation of ${databaseID} to complete...`); await operation.promise(); - console.log('Added the MarketingBudget column.'); + console.log(`Created database ${databaseID} on instance ${instanceID}.`); } catch (err) { console.error('ERROR:', err); - } finally { - // Close the database when finished. - database.close(); } - // [END spanner_add_column] -} -async function queryDataWithNewColumn(instanceId, databaseId, projectId) { - // [START spanner_query_data_with_new_column] - // This sample uses the `MarketingBudget` column. You can add the column - // by running the `add_column` sample or by running this DDL statement against - // your database: - // ALTER TABLE Albums ADD COLUMN MarketingBudget INT64 + // [END spanner_create_database] +} - // Imports the Google Cloud client library - const {Spanner} = require('@google-cloud/spanner'); +async function addColumn(instanceId, databaseId, projectId) { + // [START spanner_add_column] /** * TODO(developer): Uncomment the following lines before running the sample. @@ -124,46 +83,46 @@ async function queryDataWithNewColumn(instanceId, databaseId, projectId) { // const instanceId = 'my-instance'; // const databaseId = 'my-database'; - // Creates a client + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + // creates a client const spanner = new Spanner({ projectId: projectId, }); - // Gets a reference to a Cloud Spanner instance and database - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); - - const query = { - sql: 'SELECT SingerId, AlbumId, MarketingBudget FROM Albums', - }; + const databaseAdminClient = spanner.getDatabaseAdminClient(); - // Queries rows from the Albums table + // Creates a new index in the database try { - const [rows] = await database.run(query); - - rows.forEach(async row => { - const json = row.toJSON(); - - console.log( - `SingerId: ${json.SingerId}, AlbumId: ${ - json.AlbumId - }, MarketingBudget: ${ - json.MarketingBudget ? 
json.MarketingBudget : null - }` - ); + const [operation] = await databaseAdminClient.updateDatabaseDdl({ + database: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + statements: ['ALTER TABLE Albums ADD COLUMN MarketingBudget INT64'], }); + + console.log('Waiting for operation to complete...'); + await operation.promise(); + + console.log('Added the MarketingBudget column.'); } catch (err) { console.error('ERROR:', err); } finally { - // Close the database when finished. - database.close(); + // Close the spanner client when finished. + // The databaseAdminClient does not require explicit closure. The closure of the Spanner client will automatically close the databaseAdminClient. + spanner.close(); } - // [END spanner_query_data_with_new_column] + + // [END spanner_add_column] } const { createDatabaseWithVersionRetentionPeriod, } = require('./database-create-with-version-retention-period'); + const { createDatabaseWithEncryptionKey, } = require('./database-create-with-encryption-key'); @@ -172,59 +131,47 @@ require('yargs') .demand(1) .command( 'createDatabase ', - 'Creates an example database with two tables in a Cloud Spanner instance.', + 'Creates an example database with two tables in a Cloud Spanner instance using Database Admin Client.', {}, opts => createDatabase(opts.instanceName, opts.databaseName, opts.projectId) ) - .command( - 'createDatabaseWithEncryptionKey ', - 'Creates an example database using given encryption key in a Cloud Spanner instance.', - {}, - opts => - createDatabaseWithEncryptionKey( - opts.instanceName, - opts.databaseName, - opts.projectId, - opts.keyName - ) - ) + .example('node $0 createDatabase "my-instance" "my-database" "my-project-id"') .command( 'addColumn ', 'Adds an example MarketingBudget column to an example Cloud Spanner table.', {}, opts => addColumn(opts.instanceName, opts.databaseName, opts.projectId) ) + .example('node $0 addColumn "my-instance" "my-database" "my-project-id"') .command( - 'queryNewColumn ', - 'Executes a read-only SQL query against an example Cloud Spanner table with an additional column (MarketingBudget) added by addColumn.', + 'createDatabaseWithVersionRetentionPeriod ', + 'Creates a database with a version retention period.', {}, opts => - queryDataWithNewColumn( + createDatabaseWithVersionRetentionPeriod( opts.instanceName, - opts.databaseName, + opts.databaseId, opts.projectId ) ) + .example( + 'node $0 createDatabaseWithVersionRetentionPeriod "my-instance" "my-database-id" "my-project-id"' + ) .command( - 'createDatabaseWithVersionRetentionPeriod ', - 'Creates a database with a version retention period.', + 'createDatabaseWithEncryptionKey ', + 'Creates an example database using given encryption key in a Cloud Spanner instance.', {}, opts => - createDatabaseWithVersionRetentionPeriod( + createDatabaseWithEncryptionKey( opts.instanceName, - opts.databaseId, - opts.projectId + opts.databaseName, + opts.projectId, + opts.keyName ) ) - .example('node $0 createDatabase "my-instance" "my-database" "my-project-id"') .example( 'node $0 createDatabaseWithEncryptionKey "my-instance" "my-database" "my-project-id" "key-name"' ) - .example('node $0 addColumn "my-instance" "my-database" "my-project-id"') - .example('node $0 queryNewColumn "my-instance" "my-database" "my-project-id"') - .example( - 'node $0 createDatabaseWithVersionRetentionPeriod "my-instance" "my-database-id" "my-project-id"' - ) .wrap(120) .recommendCommands() .epilogue('For more information, see 
https://cloud.google.com/spanner/docs') diff --git a/samples/sequence-alter.js b/samples/sequence-alter.js index 1fe9a64e7..b2dde1ea6 100644 --- a/samples/sequence-alter.js +++ b/samples/sequence-alter.js @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -20,6 +20,7 @@ async function main(instanceId, databaseId, projectId) { // [START spanner_alter_sequence] + // Imports the Google Cloud client library. const {Spanner} = require('@google-cloud/spanner'); @@ -36,16 +37,22 @@ async function main(instanceId, databaseId, projectId) { }); async function alterSequence(instanceId, databaseId) { - // Gets a reference to a Cloud Spanner instance and database - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + // Gets a reference to a Cloud Spanner Database Admin Client object + const databaseAdminClient = spanner.getDatabaseAdminClient(); const request = [ 'ALTER SEQUENCE Seq SET OPTIONS (skip_range_min = 1000, skip_range_max = 5000000)', ]; try { - const [operation] = await database.updateSchema(request); + const [operation] = await databaseAdminClient.updateDatabaseDdl({ + database: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + statements: request, + }); console.log('Waiting for operation to complete...'); await operation.promise(); @@ -56,6 +63,11 @@ async function main(instanceId, databaseId, projectId) { } catch (err) { console.error('ERROR:', err); } + + // Gets a reference to a Cloud Spanner instance and database + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + database.runTransaction(async (err, transaction) => { if (err) { console.error(err); diff --git a/samples/sequence-create.js b/samples/sequence-create.js index 643440e8d..dd4c11424 100644 --- a/samples/sequence-create.js +++ b/samples/sequence-create.js @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -36,9 +36,8 @@ async function main(instanceId, databaseId, projectId) { }); async function createSequence(instanceId, databaseId) { - // Gets a reference to a Cloud Spanner instance and database - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + // Gets a reference to a Cloud Spanner Database Admin Client object + const databaseAdminClient = spanner.getDatabaseAdminClient(); const request = [ "CREATE SEQUENCE Seq OPTIONS (sequence_kind = 'bit_reversed_positive')", @@ -47,7 +46,14 @@ async function main(instanceId, databaseId, projectId) { // Creates a new table with sequence try { - const [operation] = await database.updateSchema(request); + const [operation] = await databaseAdminClient.updateDatabaseDdl({ + database: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + statements: request, + }); console.log('Waiting for operation to complete...'); await operation.promise(); @@ -58,6 +64,11 @@ async function main(instanceId, databaseId, projectId) { } catch (err) { console.error('ERROR:', err); } + + // Gets a reference to a Cloud Spanner instance and database + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + database.runTransaction(async (err, transaction) => { if (err) { console.error(err); diff --git a/samples/sequence-drop.js b/samples/sequence-drop.js index 513b112d3..4693cfd5f 100644 --- a/samples/sequence-drop.js +++ b/samples/sequence-drop.js @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -36,9 +36,8 @@ async function main(instanceId, databaseId, projectId) { }); async function dropSequence(instanceId, databaseId) { - // Gets a reference to a Cloud Spanner instance and database - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + // Gets a reference to a Cloud Spanner Database Admin Client object + const databaseAdminClient = spanner.getDatabaseAdminClient(); const request = [ 'ALTER TABLE Customers ALTER COLUMN CustomerId DROP DEFAULT', @@ -47,7 +46,14 @@ async function main(instanceId, databaseId, projectId) { // Drop sequence from DDL try { - const [operation] = await database.updateSchema(request); + const [operation] = await databaseAdminClient.updateDatabaseDdl({ + database: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + statements: request, + }); console.log('Waiting for operation to complete...'); await operation.promise(); @@ -58,8 +64,9 @@ async function main(instanceId, databaseId, projectId) { } catch (err) { console.error('ERROR:', err); } finally { - // Close the database when finished. - await database.close(); + // Close the spanner client when finished. + // The databaseAdminClient does not require explicit closure. The closure of the Spanner client will automatically close the databaseAdminClient. 
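+ // A standalone admin client (one not obtained via spanner.getDatabaseAdminClient()) would need to be closed separately; this one shares the Spanner client's lifecycle.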
+ spanner.close(); } } await dropSequence(instanceId, databaseId); diff --git a/samples/struct.js b/samples/struct.js index 3efd08127..a484b7ee1 100644 --- a/samples/struct.js +++ b/samples/struct.js @@ -111,21 +111,6 @@ async function queryDataWithStruct(instanceId, databaseId, projectId) { params: { name: nameStruct, }, - types: { - name: { - type: 'struct', - fields: [ - { - name: 'FirstName', - type: 'string', - }, - { - name: 'LastName', - type: 'string', - }, - ], - }, - }, }; // Queries rows from the Singers table @@ -265,21 +250,6 @@ async function queryStructField(instanceId, databaseId, projectId) { params: { name: nameStruct, }, - types: { - name: { - type: 'struct', - fields: [ - { - name: 'FirstName', - type: 'string', - }, - { - name: 'LastName', - type: 'string', - }, - ], - }, - }, }; // Queries rows from the Singers table diff --git a/samples/system-test/archived/spanner.test.js b/samples/system-test/archived/spanner.test.js new file mode 100644 index 000000000..e639490b4 --- /dev/null +++ b/samples/system-test/archived/spanner.test.js @@ -0,0 +1,2013 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +const {Spanner} = require('@google-cloud/spanner'); +const {KeyManagementServiceClient} = require('@google-cloud/kms'); +const {assert} = require('chai'); +const {describe, it, before, after, afterEach} = require('mocha'); +const cp = require('child_process'); +const pLimit = require('p-limit'); + +const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); + +const batchCmd = 'node batch.js'; +const crudCmd = 'node crud.js'; +const schemaCmd = 'node archived/schema.js'; +const queryOptionsCmd = 'node queryoptions.js'; +const rpcPriorityRunCommand = 'node rpc-priority-run.js'; +const rpcPriorityReadCommand = 'node rpc-priority-read.js'; +const rpcPriorityBatchDMLCommand = 'node rpc-priority-batch-dml.js'; +const rpcPriorityPartitionedDMLCommand = 'node rpc-priority-partitioned-dml.js'; +const rpcPriorityTransactionCommand = 'node rpc-priority-transaction.js'; +const rpcPriorityQueryPartitionsCommand = + 'node rpc-priority-query-partitions.js'; +const transactionCmd = 'node transaction.js'; +const transactionTagCommand = 'node transaction-tag.js'; +const requestTagCommand = 'node request-tag.js'; +const timestampCmd = 'node archived/timestamp.js'; +const structCmd = 'node struct.js'; +const dmlCmd = 'node dml.js'; +const datatypesCmd = 'node archived/datatypes.js'; +const backupsCmd = 'node archived/backups.js'; +const instanceCmd = 'node archived/instance.js'; +const createTableWithForeignKeyDeleteCascadeCommand = + 'node archived/table-create-with-foreign-key-delete-cascade.js'; +const alterTableWithForeignKeyDeleteCascadeCommand = + 'node archived/table-alter-with-foreign-key-delete-cascade.js'; +const dropForeignKeyConstraintDeleteCascaseCommand = + 'node archived/table-drop-foreign-key-constraint-delete-cascade.js'; + +const CURRENT_TIME = Math.round(Date.now() / 1000).toString(); +const PROJECT_ID = 
process.env.GCLOUD_PROJECT; +const PREFIX = 'test-instance'; +const INSTANCE_ID = + process.env.SPANNERTEST_INSTANCE || `${PREFIX}-${CURRENT_TIME}`; +const SAMPLE_INSTANCE_ID = `${PREFIX}-my-sample-instance-${CURRENT_TIME}`; +const SAMPLE_INSTANCE_CONFIG_ID = `custom-my-sample-instance-config-${CURRENT_TIME}`; +const BASE_INSTANCE_CONFIG_ID = 'regional-us-west2'; +const INSTANCE_ALREADY_EXISTS = !!process.env.SPANNERTEST_INSTANCE; +const DATABASE_ID = `test-database-${CURRENT_TIME}`; +const PG_DATABASE_ID = `test-pg-database-${CURRENT_TIME}`; +const RESTORE_DATABASE_ID = `test-database-${CURRENT_TIME}-r`; +const ENCRYPTED_RESTORE_DATABASE_ID = `test-database-${CURRENT_TIME}-r-enc`; +const VERSION_RETENTION_DATABASE_ID = `test-database-${CURRENT_TIME}-v`; +const ENCRYPTED_DATABASE_ID = `test-database-${CURRENT_TIME}-enc`; +const DEFAULT_LEADER_DATABASE_ID = `test-database-${CURRENT_TIME}-dl`; +const SEQUENCE_DATABASE_ID = `test-seq-database-${CURRENT_TIME}-r`; +const BACKUP_ID = `test-backup-${CURRENT_TIME}`; +const COPY_BACKUP_ID = `test-copy-backup-${CURRENT_TIME}`; +const ENCRYPTED_BACKUP_ID = `test-backup-${CURRENT_TIME}-enc`; +const CANCELLED_BACKUP_ID = `test-backup-${CURRENT_TIME}-c`; +const LOCATION_ID = 'regional-us-central1'; +const PG_LOCATION_ID = 'regional-us-west2'; +const KEY_LOCATION_ID = 'us-central1'; +const KEY_RING_ID = 'test-key-ring-node'; +const KEY_ID = 'test-key'; +const DEFAULT_LEADER = 'us-central1'; +const DEFAULT_LEADER_2 = 'us-east1'; + +const spanner = new Spanner({ + projectId: PROJECT_ID, +}); +const LABEL = 'node-sample-tests'; +const GAX_OPTIONS = { + retry: { + retryCodes: [4, 8, 14], + backoffSettings: { + initialRetryDelayMillis: 1000, + retryDelayMultiplier: 1.3, + maxRetryDelayMillis: 32000, + initialRpcTimeoutMillis: 60000, + rpcTimeoutMultiplier: 1, + maxRpcTimeoutMillis: 60000, + totalTimeoutMillis: 600000, + }, + }, +}; + +const delay = async test => { + const retries = test.currentRetry(); + // No retry on the first failure. + if (retries === 0) return; + // See: https://cloud.google.com/storage/docs/exponential-backoff + const ms = Math.pow(2, retries) + Math.random() * 1000; + return new Promise(done => { + console.info(`retrying "${test.title}" in ${ms}ms`); + setTimeout(done, ms); + }); +}; + +async function deleteStaleInstances() { + let [instances] = await spanner.getInstances({ + filter: `(labels.${LABEL}:true) OR (labels.cloud_spanner_samples:true)`, + }); + const old = new Date(); + old.setHours(old.getHours() - 4); + + instances = instances.filter(instance => { + return ( + instance.metadata.labels['created'] && + new Date(parseInt(instance.metadata.labels['created']) * 1000) < old + ); + }); + const limit = pLimit(5); + await Promise.all( + instances.map(instance => + limit(() => setTimeout(deleteInstance, delay, instance)) + ) + ); +} + +async function deleteInstance(instance) { + const [backups] = await instance.getBackups(); + await Promise.all(backups.map(backup => backup.delete(GAX_OPTIONS))); + return instance.delete(GAX_OPTIONS); +} + +async function getCryptoKey() { + const NOT_FOUND = 5; + + // Instantiates a client. + const client = new KeyManagementServiceClient(); + + // Build the parent key ring name. + const keyRingName = client.keyRingPath( + PROJECT_ID, + KEY_LOCATION_ID, + KEY_RING_ID + ); + + // Get key ring. + try { + await client.getKeyRing({name: keyRingName}); + } catch (err) { + // Create key ring if it doesn't exist. + if (err.code === NOT_FOUND) { + // Build the parent location name. 
+ const locationName = client.locationPath(PROJECT_ID, KEY_LOCATION_ID); + await client.createKeyRing({ + parent: locationName, + keyRingId: KEY_RING_ID, + }); + } else { + throw err; + } + } + + // Get key. + try { + // Build the key name + const keyName = client.cryptoKeyPath( + PROJECT_ID, + KEY_LOCATION_ID, + KEY_RING_ID, + KEY_ID + ); + const [key] = await client.getCryptoKey({ + name: keyName, + }); + return key; + } catch (err) { + // Create key if it doesn't exist. + if (err.code === NOT_FOUND) { + const [key] = await client.createCryptoKey({ + parent: keyRingName, + cryptoKeyId: KEY_ID, + cryptoKey: { + purpose: 'ENCRYPT_DECRYPT', + versionTemplate: { + algorithm: 'GOOGLE_SYMMETRIC_ENCRYPTION', + }, + }, + }); + return key; + } else { + throw err; + } + } +} + +describe('Spanner', () => { + const instance = spanner.instance(INSTANCE_ID); + + before(async () => { + await deleteStaleInstances(); + + if (!INSTANCE_ALREADY_EXISTS) { + const [, operation] = await instance.create({ + config: LOCATION_ID, + nodes: 1, + labels: { + [LABEL]: 'true', + created: CURRENT_TIME, + }, + gaxOptions: GAX_OPTIONS, + }); + return operation.promise(); + } else { + console.log( + `Not creating temp instance, using + ${instance.formattedName_}...` + ); + } + }); + + after(async () => { + const instance = spanner.instance(INSTANCE_ID); + + if (!INSTANCE_ALREADY_EXISTS) { + // Make sure all backups are deleted before an instance can be deleted. + await Promise.all([ + instance.backup(BACKUP_ID).delete(GAX_OPTIONS), + instance.backup(ENCRYPTED_BACKUP_ID).delete(GAX_OPTIONS), + instance.backup(COPY_BACKUP_ID).delete(GAX_OPTIONS), + instance.backup(CANCELLED_BACKUP_ID).delete(GAX_OPTIONS), + ]); + await instance.delete(GAX_OPTIONS); + } else { + await Promise.all([ + instance.database(DATABASE_ID).delete(), + instance.database(PG_DATABASE_ID).delete(), + instance.database(RESTORE_DATABASE_ID).delete(), + instance.database(ENCRYPTED_RESTORE_DATABASE_ID).delete(), + instance.backup(BACKUP_ID).delete(GAX_OPTIONS), + instance.backup(COPY_BACKUP_ID).delete(GAX_OPTIONS), + instance.backup(ENCRYPTED_BACKUP_ID).delete(GAX_OPTIONS), + instance.backup(CANCELLED_BACKUP_ID).delete(GAX_OPTIONS), + ]); + } + await spanner.instance(SAMPLE_INSTANCE_ID).delete(GAX_OPTIONS); + }); + + describe('instance', () => { + afterEach(async () => { + const sample_instance = spanner.instance(SAMPLE_INSTANCE_ID); + await sample_instance.delete(); + }); + + // create_instance + it('should create an example instance', async () => { + const output = execSync( + `${instanceCmd} createInstance "${SAMPLE_INSTANCE_ID}" ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp( + `Waiting for operation on ${SAMPLE_INSTANCE_ID} to complete...` + ) + ); + assert.match( + output, + new RegExp(`Created instance ${SAMPLE_INSTANCE_ID}.`) + ); + }); + + // create_instance_with_processing_units + it('should create an example instance with processing units', async () => { + const output = execSync( + `${instanceCmd} createInstanceWithProcessingUnits "${SAMPLE_INSTANCE_ID}" ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp( + `Waiting for operation on ${SAMPLE_INSTANCE_ID} to complete...` + ) + ); + assert.match( + output, + new RegExp(`Created instance ${SAMPLE_INSTANCE_ID}.`) + ); + assert.match( + output, + new RegExp(`Instance ${SAMPLE_INSTANCE_ID} has 500 processing units.`) + ); + }); + }); + + // check that base instance was created + it('should have created an instance', async () => { + const [exists] = await instance.exists(); + 
assert.strictEqual( + exists, + true, + 'The main instance was not created successfully!' + ); + }); + + // create_database + it('should create an example database', async () => { + const output = execSync( + `${schemaCmd} createDatabase "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`) + ); + assert.match( + output, + new RegExp(`Created database ${DATABASE_ID} on instance ${INSTANCE_ID}.`) + ); + }); + + // update_database + it('should set database metadata', async () => { + const output = execSync( + `node database-update.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp( + `Waiting for update operation for ${DATABASE_ID} to complete...` + ) + ); + assert.match(output, new RegExp(`Updated database ${DATABASE_ID}.`)); + // cleanup + const [operation] = await instance + .database(DATABASE_ID) + .setMetadata({enableDropProtection: false}); + await operation.promise(); + }); + + describe('encrypted database', () => { + after(async () => { + const instance = spanner.instance(INSTANCE_ID); + const encrypted_database = instance.database(ENCRYPTED_DATABASE_ID); + await encrypted_database.delete(); + }); + + // create_database_with_encryption_key + it('should create a database with an encryption key', async () => { + const key = await getCryptoKey(); + + const output = execSync( + `${schemaCmd} createDatabaseWithEncryptionKey "${INSTANCE_ID}" "${ENCRYPTED_DATABASE_ID}" ${PROJECT_ID} "${key.name}"` + ); + assert.match( + output, + new RegExp( + `Waiting for operation on ${ENCRYPTED_DATABASE_ID} to complete...` + ) + ); + assert.match( + output, + new RegExp( + `Created database ${ENCRYPTED_DATABASE_ID} on instance ${INSTANCE_ID}.` + ) + ); + assert.match( + output, + new RegExp(`Database encrypted with key ${key.name}.`) + ); + }); + }); + + describe('quickstart', () => { + // Running the quickstart test in here since there's already a spanner + // instance and database set up at this point. 
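+ // The quickstart sample is run as a child process; its stdout is captured by execSync and matched below.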
+ it('should query a table', async () => { + const output = execSync( + `node quickstart ${PROJECT_ID} ${INSTANCE_ID} ${DATABASE_ID}` + ); + assert.match(output, /Query: \d+ found./); + }); + }); + + // insert_data + it('should insert rows into an example table', async () => { + const output = execSync( + `${crudCmd} insert ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /Inserted data\./); + }); + + // delete_data + it('should delete and then insert rows in the example tables', async () => { + let output = execSync( + `${crudCmd} delete ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.include(output, 'Deleted individual rows in Albums.'); + assert.include(output, '2 records deleted from Singers.'); + assert.include(output, '3 records deleted from Singers.'); + output = execSync( + `${crudCmd} insert ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /Inserted data\./); + }); + + // query_data + it('should query an example table and return matching rows', async () => { + const output = execSync( + `${crudCmd} query ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk/); + }); + + // read_data + it('should read an example table', async () => { + const output = execSync( + `${crudCmd} read ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk/); + }); + + // add_column + it('should add a column to a table', async () => { + const output = execSync( + `${schemaCmd} addColumn ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /Waiting for operation to complete\.\.\./); + assert.match(output, /Added the MarketingBudget column\./); + }); + + // update_data + it('should update existing rows in an example table', async () => { + const output = execSync( + `${crudCmd} update ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /Updated data\./); + }); + + // read_stale_data + it('should read stale data from an example table', async () => { + // read-stale-data reads data that is exactly 15 seconds old. So, make sure + // 15 seconds have elapsed since the update_data test. + await new Promise(r => setTimeout(r, 16000)); + const output = execSync( + `${crudCmd} read-stale ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + /SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk, MarketingBudget: 100000/ + ); + assert.match( + output, + /SingerId: 2, AlbumId: 2, AlbumTitle: Forever Hold your Peace, MarketingBudget: 500000/ + ); + }); + + // query_data_with_new_column + it('should query an example table with an additional column and return matching rows', async () => { + const output = execSync( + `${schemaCmd} queryNewColumn ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /SingerId: 1, AlbumId: 1, MarketingBudget: 100000/); + assert.match(output, /SingerId: 2, AlbumId: 2, MarketingBudget: 500000/); + }); + + // create_index + it('should create an index in an example table', async () => { + const output = execSync( + `node archived/index-create ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /Waiting for operation to complete\.\.\./); + assert.match(output, /Added the AlbumsByAlbumTitle index\./); + }); + + // create_storing_index + it('should create a storing index in an example table', async function () { + this.retries(5); + // Delay the start of the test, if this is a retry. 
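+ // delay() backs off exponentially with the current retry count plus random jitter (see the helper defined near the top of this file).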
+ await delay(this.test); + + const output = execSync( + `node archived/index-create-storing ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /Waiting for operation to complete\.\.\./); + assert.match(output, /Added the AlbumsByAlbumTitle2 index\./); + }); + + // query_data_with_index + it('should query an example table with an index and return matching rows', async () => { + const output = execSync( + `node index-query-data ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + /AlbumId: 2, AlbumTitle: Go, Go, Go, MarketingBudget:/ + ); + assert.notMatch( + output, + /AlbumId: 1, AlbumTitle: Total Junk, MarketingBudget:/ + ); + }); + + it('should respect query boundaries when querying an example table with an index', async () => { + const output = execSync( + `node index-query-data ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID} "Ardvark" "Zoo"` + ); + assert.match( + output, + /AlbumId: 1, AlbumTitle: Total Junk, MarketingBudget:/ + ); + assert.match( + output, + /AlbumId: 2, AlbumTitle: Go, Go, Go, MarketingBudget:/ + ); + }); + + // read_data_with_index + it('should read an example table with an index', async () => { + const output = execSync( + `node index-read-data ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /AlbumId: 1, AlbumTitle: Total Junk/); + }); + + // read_data_with_storing_index + it('should read an example table with a storing index', async () => { + const output = execSync( + `node index-read-data-with-storing ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /AlbumId: 1, AlbumTitle: Total Junk/); + }); + + // spanner_create_client_with_query_options + it('should use query options from a database reference', async () => { + const output = execSync( + `${queryOptionsCmd} databaseWithQueryOptions ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + /AlbumId: 2, AlbumTitle: Forever Hold your Peace, MarketingBudget:/ + ); + }); + + // spanner_query_with_query_options + it('should use query options on request', async () => { + const output = execSync( + `${queryOptionsCmd} queryWithQueryOptions ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + /AlbumId: 2, AlbumTitle: Forever Hold your Peace, MarketingBudget:/ + ); + }); + + // query with RPC priority for run command + it('should use RPC priority from request options for run command', async () => { + const output = execSync( + `${rpcPriorityRunCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + /Successfully fetched \d rows using low RPC priority\./ + ); + assert.match( + output, + /AlbumId: 2, AlbumTitle: Forever Hold your Peace, MarketingBudget:/ + ); + }); + + // query with RPC priority for Read command + it('should use RPC priority from request options for read command', async () => { + const output = execSync( + `${rpcPriorityReadCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + /Successfully fetched \d rows using low RPC priority\./ + ); + assert.match(output, /SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk/); + }); + + // query with RPC priority for transaction command + it('should use RPC priority from request options for transaction command', async () => { + const output = execSync( + `${rpcPriorityTransactionCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + /Successfully inserted 1 record into the Singers table using low RPC priority\./ + ); + }); + + // query with 
RPC priority for batch DML command + it('should use RPC priority from request options for batch DML command', async () => { + const output = execSync( + `${rpcPriorityBatchDMLCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + /Successfully executed 2 SQL statements using Batch DML using low RPC priority\./ + ); + }); + + // query with RPC priority for partitioned DML command + it('should use RPC priority from request options for partitioned DML command', async () => { + const output = execSync( + `${rpcPriorityPartitionedDMLCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp('Successfully updated (\\d+) records using low RPC priority.') + ); + }); + + // query with RPC priority for Query partitions command + it('should use RPC priority from request options for Query partition command', async () => { + const output = execSync( + `${rpcPriorityQueryPartitionsCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + /Successfully created \d query partitions using low RPC priority\./ + ); + assert.match(output, /Successfully received \d from executed partitions\./); + }); + + // read_only_transactioni + it('should read an example table using transactions', async () => { + const output = execSync( + `${transactionCmd} readOnly ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk/); + assert.match(output, /Successfully executed read-only transaction\./); + }); + + // read_write_transaction + it('should read from and write to an example table using transactions', async () => { + let output = execSync( + `${transactionCmd} readWrite ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /The first album's marketing budget: 100000/); + assert.match(output, /The second album's marketing budget: 500000/); + assert.match( + output, + /Successfully executed read-write transaction to transfer 200000 from Album 2 to Album 1./ + ); + output = execSync( + `${schemaCmd} queryNewColumn ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /SingerId: 1, AlbumId: 1, MarketingBudget: 300000/); + assert.match(output, /SingerId: 2, AlbumId: 2, MarketingBudget: 300000/); + }); + + // batch_client + it('should create and execute query partitions', async () => { + const output = execSync( + `${batchCmd} create-and-execute-query-partitions ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /Successfully created \d query partitions\./); + assert.match(output, /Successfully received \d from executed partitions\./); + }); + + // execute_partition + it('should execute a partition', async () => { + const instance = spanner.instance(INSTANCE_ID); + const database = instance.database(DATABASE_ID); + const [transaction] = await database.createBatchTransaction(); + const identifier = JSON.stringify(transaction.identifier()); + + const query = 'SELECT SingerId FROM Albums'; + const [partitions] = await transaction.createQueryPartitions(query); + const partition = JSON.stringify(partitions[0]); + + const output = execSync( + `${batchCmd} execute-partition ${INSTANCE_ID} ${DATABASE_ID} '${identifier}' '${partition}' ${PROJECT_ID}` + ); + assert.match(output, /Successfully received \d from executed partition\./); + await transaction.close(); + }); + + // add_timestamp_column + it('should add a timestamp column to a table', async () => { + const output = execSync( + `${timestampCmd} 
addTimestampColumn ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /Waiting for operation to complete\.\.\./); + assert.match( + output, + /Added LastUpdateTime as a commit timestamp column in Albums table\./ + ); + }); + + // update_data_with_timestamp_column + it('should update existing rows in an example table with commit timestamp column', async () => { + const output = execSync( + `${timestampCmd} updateWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /Updated data\./); + }); + + // query_data_with_timestamp_column + it('should query an example table with an additional timestamp column and return matching rows', async () => { + const output = execSync( + `${timestampCmd} queryWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + /SingerId: 1, AlbumId: 1, MarketingBudget: 1000000, LastUpdateTime:/ + ); + assert.match( + output, + /SingerId: 2, AlbumId: 2, MarketingBudget: 750000, LastUpdateTime:/ + ); + }); + + // create_table_with_timestamp_column + it('should create an example table with a timestamp column', async () => { + const output = execSync( + `${timestampCmd} createTableWithTimestamp "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}` + ); + + assert.match( + output, + new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`) + ); + assert.match( + output, + new RegExp(`Created table Performances in database ${DATABASE_ID}.`) + ); + }); + + // insert_data_with_timestamp + it('should insert rows into an example table with timestamp column', async () => { + const output = execSync( + `${timestampCmd} insertWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /Inserted data\./); + }); + + // query_new_table_with_timestamp + it('should query an example table with a non-null timestamp column and return matching rows', async () => { + const output = execSync( + `${timestampCmd} queryTableWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /SingerId: 1, VenueId: 4, EventDate:/); + assert.match(output, /Revenue: 15000, LastUpdateTime:/); + }); + + // write_data_for_struct_queries + it('should insert rows into an example table for use with struct query examples', async () => { + const output = execSync( + `${structCmd} writeDataForStructQueries ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /Inserted data\./); + }); + + // query_with_struct_param + it('should query an example table with a STRUCT param', async () => { + const output = execSync( + `${structCmd} queryDataWithStruct ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /SingerId: 6/); + }); + + // query_with_array_of_struct_param + it('should query an example table with an array of STRUCT param', async () => { + const output = execSync( + `${structCmd} queryWithArrayOfStruct ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /SingerId: 6\nSingerId: 7\nSingerId: 8/); + }); + + // query_with_struct_field_param + it('should query an example table with a STRUCT field param', async () => { + const output = execSync( + `${structCmd} queryStructField ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /SingerId: 6/); + }); + + // query_with_nested_struct_param + it('should query an example table with a nested STRUCT param', async () => { + const output = execSync( + `${structCmd} queryNestedStructField ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( 
+ output, + /SingerId: 6, SongName: Imagination\nSingerId: 9, SongName: Imagination/ + ); + }); + + // dml_standard_insert + it('should insert rows into an example table using a DML statement', async () => { + const output = execSync( + `${dmlCmd} insertUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + /Successfully inserted 1 record into the Singers table/ + ); + }); + + // dml_standard_update + it('should update a row in an example table using a DML statement', async () => { + const output = execSync( + `${dmlCmd} updateUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /Successfully updated 1 record/); + }); + + // dml_standard_delete + it('should delete a row from an example table using a DML statement', async () => { + const output = execSync( + `${dmlCmd} deleteUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /Successfully deleted 1 record\./); + }); + + // dml_standard_update_with_timestamp + it('should update the timestamp of multiple records in an example table using a DML statement', async () => { + const output = execSync( + `${dmlCmd} updateUsingDmlWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /Successfully updated 3 records/); + }); + + // dml_write_then_read + it('should insert a record in an example table using a DML statement and then query the record', async () => { + const output = execSync( + `${dmlCmd} writeAndReadUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /Timothy Campbell/); + }); + + // dml_structs + it('should update a record in an example table using a DML statement along with a struct value', async () => { + const output = execSync( + `${dmlCmd} updateUsingDmlWithStruct ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /Successfully updated 1 record/); + }); + + // dml_getting_started_insert + it('should insert multiple records into an example table using a DML statement', async () => { + const output = execSync( + `${dmlCmd} writeUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /4 records inserted/); + }); + + // dml_query_with_parameter + it('should use a parameter query to query record that was inserted using a DML statement', async () => { + const output = execSync( + `${dmlCmd} queryWithParameter ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /SingerId: 12, FirstName: Melissa, LastName: Garcia/); + }); + + // dml_getting_started_update + it('should transfer value from one record to another using DML statements within a transaction', async () => { + const output = execSync( + `${dmlCmd} writeWithTransactionUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + /Successfully executed read-write transaction using DML to transfer 200000 from Album 2 to Album 1/ + ); + }); + + // dml_partitioned_update + it('should update multiple records using a partitioned DML statement', async () => { + const output = execSync( + `${dmlCmd} updateUsingPartitionedDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /Successfully updated 3 records/); + }); + + // dml_partitioned_delete + it('should delete multiple records using a partitioned DML statement', async () => { + const output = execSync( + `${dmlCmd} deleteUsingPartitionedDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /Successfully deleted 6 records/); + }); + + // 
dml_batch_update + it('should insert and update records using Batch DML', async () => { + const output = execSync( + `${dmlCmd} updateUsingBatchDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + /Successfully executed 2 SQL statements using Batch DML/ + ); + }); + + // dml_returning_insert + it('should insert records using DML Returning', async () => { + const output = execSync( + `node dml-returning-insert ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp('Successfully inserted 1 record into the Singers table') + ); + assert.match(output, new RegExp('Virginia Watson')); + }); + + // dml_returning_update + it('should update records using DML Returning', async () => { + const output = execSync( + `node dml-returning-update ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp('Successfully updated 1 record into the Albums table') + ); + assert.match(output, new RegExp('2000000')); + }); + + // dml_returning_delete + it('should delete records using DML Returning', async () => { + const output = execSync( + `node dml-returning-delete ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp('Successfully deleted 1 record from the Singers table') + ); + assert.match(output, new RegExp('Virginia Watson')); + }); + + // create_table_with_datatypes + it('should create Venues example table with supported datatype columns', async () => { + const output = execSync( + `${datatypesCmd} createVenuesTable "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}` + ); + + assert.match( + output, + new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`) + ); + assert.match( + output, + new RegExp(`Created table Venues in database ${DATABASE_ID}.`) + ); + }); + + // insert_datatypes_data + it('should insert multiple records into Venues example table', async () => { + const output = execSync( + `${datatypesCmd} insertData ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /Inserted data./); + }); + + // query_with_array_parameter + it('should use an ARRAY query parameter to query record from the Venues example table', async () => { + const output = execSync( + `${datatypesCmd} queryWithArray ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + /VenueId: 19, VenueName: Venue 19, AvailableDate: 2020-11-01/ + ); + assert.match( + output, + /VenueId: 42, VenueName: Venue 42, AvailableDate: 2020-10-01/ + ); + }); + + // query_with_bool_parameter + it('should use a BOOL query parameter to query record from the Venues example table', async () => { + const output = execSync( + `${datatypesCmd} queryWithBool ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + /VenueId: 19, VenueName: Venue 19, OutdoorVenue: true/ + ); + }); + + // query_with_bytes_parameter + it('should use a BYTES query parameter to query record from the Venues example table', async () => { + const output = execSync( + `${datatypesCmd} queryWithBytes ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /VenueId: 4, VenueName: Venue 4/); + }); + + // query_with_date_parameter + it('should use a DATE query parameter to query record from the Venues example table', async () => { + const output = execSync( + `${datatypesCmd} queryWithDate ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + /VenueId: 4, VenueName: Venue 4, LastContactDate: 2018-09-02/ + ); + assert.match( + output, + /VenueId: 42, 
VenueName: Venue 42, LastContactDate: 2018-10-01/ + ); + }); + + // query_with_float_parameter + it('should use a FLOAT64 query parameter to query record from the Venues example table', async () => { + const output = execSync( + `${datatypesCmd} queryWithFloat ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + /VenueId: 4, VenueName: Venue 4, PopularityScore: 0.8/ + ); + assert.match( + output, + /VenueId: 19, VenueName: Venue 19, PopularityScore: 0.9/ + ); + }); + + // query_with_int_parameter + it('should use a INT64 query parameter to query record from the Venues example table', async () => { + const output = execSync( + `${datatypesCmd} queryWithInt ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /VenueId: 19, VenueName: Venue 19, Capacity: 6300/); + assert.match(output, /VenueId: 42, VenueName: Venue 42, Capacity: 3000/); + }); + + // query_with_string_parameter + it('should use a STRING query parameter to query record from the Venues example table', async () => { + const output = execSync( + `${datatypesCmd} queryWithString ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /VenueId: 42, VenueName: Venue 42/); + }); + + // query_with_timestamp_parameter + it('should use a TIMESTAMP query parameter to query record from the Venues example table', async () => { + const output = execSync( + `${datatypesCmd} queryWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /VenueId: 4, VenueName: Venue 4, LastUpdateTime:/); + assert.match(output, /VenueId: 19, VenueName: Venue 19, LastUpdateTime:/); + assert.match(output, /VenueId: 42, VenueName: Venue 42, LastUpdateTime:/); + }); + + // add_numeric_column + it('should add a Revenue column to Venues example table', async () => { + const output = execSync( + `${datatypesCmd} addNumericColumn "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}` + ); + + assert.include( + output, + `Waiting for operation on ${DATABASE_ID} to complete...` + ); + assert.include( + output, + `Added Revenue column to Venues table in database ${DATABASE_ID}.` + ); + }); + + // update_data_with_numeric + it('should update rows in Venues example table to add data in Revenue column', async () => { + const output = execSync( + `${datatypesCmd} updateWithNumericData ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /Updated data./); + }); + + // query_with_numeric_parameter + it('should use a NUMERIC query parameter to query records from the Venues example table', async () => { + const output = execSync( + `${datatypesCmd} queryWithNumericParameter ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /VenueId: 4, Revenue: 35000/); + }); + + // query with request tag + it('should execute a query with a request tag', async () => { + const output = execSync( + `${requestTagCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk/); + }); + + // read_write_transaction with transaction tag + it('should execute a read/write transaction with a transaction tag', async () => { + const output = execSync( + `${transactionTagCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.include(output, 'Inserted new outdoor venue'); + }); + + // add_json_column + it('should add a VenueDetails column to Venues example table', async () => { + const output = execSync( + `${datatypesCmd} addJsonColumn "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}` + ); + + 
assert.include( + output, + `Waiting for operation on ${DATABASE_ID} to complete...` + ); + assert.include( + output, + `Added VenueDetails column to Venues table in database ${DATABASE_ID}.` + ); + }); + + // update_data_with_json + it('should update rows in Venues example table to add data in VenueDetails column', async () => { + const output = execSync( + `${datatypesCmd} updateWithJsonData ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /Updated data./); + }); + + // query_with_json_parameter + it('should use a JSON query parameter to query records from the Venues example table', async () => { + const output = execSync( + `${datatypesCmd} queryWithJsonParameter ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /VenueId: 19, Details: {"open":true,"rating":9}/); + }); + + // add_and_drop_new_database_role + it('should add and drop new database roles', async () => { + const output = execSync( + `node archived/add-and-drop-new-database-role.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, new RegExp('Waiting for operation to complete...')); + assert.match( + output, + new RegExp('Created roles child and parent and granted privileges') + ); + assert.match( + output, + new RegExp('Revoked privileges and dropped role child') + ); + }); + + // read_data_with_database_role + it('should read data with database role', async () => { + const output = execSync( + `node read-data-with-database-role.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp('SingerId: 1, FirstName: Marc, LastName: Richards') + ); + }); + + // get_database_roles + it('should list database roles', async () => { + const output = execSync( + `node archived/get-database-roles.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp( + `Role: projects/${PROJECT_ID}/instances/${INSTANCE_ID}/databases/${DATABASE_ID}/databaseRoles/public` + ) + ); + }); + + // create_backup + it('should create a backup of the database', async () => { + const instance = spanner.instance(INSTANCE_ID); + const database = instance.database(DATABASE_ID); + const query = { + sql: 'SELECT CURRENT_TIMESTAMP() as Timestamp', + }; + const [rows] = await database.run(query); + const versionTime = rows[0].toJSON().Timestamp.toISOString(); + + const output = execSync( + `${backupsCmd} createBackup ${INSTANCE_ID} ${DATABASE_ID} ${BACKUP_ID} ${PROJECT_ID} ${versionTime}` + ); + assert.match(output, new RegExp(`Backup (.+)${BACKUP_ID} of size`)); + }); + + // create_backup_with_encryption_key + it('should create an encrypted backup of the database', async () => { + const key = await getCryptoKey(); + + const output = execSync( + `${backupsCmd} createBackupWithEncryptionKey ${INSTANCE_ID} ${DATABASE_ID} ${ENCRYPTED_BACKUP_ID} ${PROJECT_ID} ${key.name}` + ); + assert.match( + output, + new RegExp(`Backup (.+)${ENCRYPTED_BACKUP_ID} of size`) + ); + assert.include(output, `using encryption key ${key.name}`); + }); + + // copy_backup + it('should create a copy of a backup', async () => { + const sourceBackupPath = `projects/${PROJECT_ID}/instances/${INSTANCE_ID}/backups/${BACKUP_ID}`; + const output = execSync( + `node archived/backups-copy.js ${INSTANCE_ID} ${COPY_BACKUP_ID} ${sourceBackupPath} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp(`(.*)Backup copy(.*)${COPY_BACKUP_ID} of size(.*)`) + ); + }); + + // cancel_backup + it('should cancel a backup of the database', async () => { + const output = execSync( + 
`${backupsCmd} cancelBackup ${INSTANCE_ID} ${DATABASE_ID} ${CANCELLED_BACKUP_ID} ${PROJECT_ID}` + ); + assert.match(output, /Backup cancelled./); + }); + + // get_backups + it('should list backups in the instance', async () => { + const output = execSync( + `${backupsCmd} getBackups ${INSTANCE_ID} ${DATABASE_ID} ${BACKUP_ID} ${PROJECT_ID}` + ); + assert.include(output, 'All backups:'); + assert.include(output, 'Backups matching backup name:'); + assert.include(output, 'Backups expiring within 30 days:'); + assert.include(output, 'Backups matching database name:'); + assert.include(output, 'Backups filtered by size:'); + assert.include(output, 'Ready backups filtered by create time:'); + assert.include(output, 'Get backups paginated:'); + const count = (output.match(new RegExp(`${BACKUP_ID}`, 'g')) || []).length; + assert.equal(count, 14); + }); + + // list_backup_operations + it('should list backup operations in the instance', async () => { + const output = execSync( + `${backupsCmd} getBackupOperations ${INSTANCE_ID} ${DATABASE_ID} ${BACKUP_ID} ${PROJECT_ID}` + ); + assert.match(output, /Create Backup Operations:/); + assert.match( + output, + new RegExp(`Backup (.+)${BACKUP_ID} (.+) is 100% complete`) + ); + assert.match(output, /Copy Backup Operations:/); + assert.match( + output, + new RegExp(`Backup (.+)${COPY_BACKUP_ID} (.+) is 100% complete`) + ); + }); + + // update_backup_expire_time + it('should update the expire time of a backup', async () => { + const output = execSync( + `${backupsCmd} updateBackup ${INSTANCE_ID} ${BACKUP_ID} ${PROJECT_ID}` + ); + assert.match(output, /Expire time updated./); + }); + + // restore_backup + it('should restore database from a backup', async function () { + // Restoring a backup can be a slow operation so the test may timeout and + // we'll have to retry. + this.retries(5); + // Delay the start of the test, if this is a retry. + await delay(this.test); + + const output = execSync( + `${backupsCmd} restoreBackup ${INSTANCE_ID} ${RESTORE_DATABASE_ID} ${BACKUP_ID} ${PROJECT_ID}` + ); + assert.match(output, /Database restored from backup./); + assert.match( + output, + new RegExp( + `Database (.+) was restored to ${RESTORE_DATABASE_ID} from backup ` + + `(.+)${BACKUP_ID} with version time (.+)` + ) + ); + }); + + // restore_backup_with_encryption_key + it('should restore database from a backup using an encryption key', async function () { + // Restoring a backup can be a slow operation so the test may timeout and + // we'll have to retry. + this.retries(5); + // Delay the start of the test, if this is a retry. 
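+ // delay() reads Mocha's currentRetry() count from the test object to decide how long to back off.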
+ await delay(this.test); + + const key = await getCryptoKey(); + + const output = execSync( + `${backupsCmd} restoreBackupWithEncryptionKey ${INSTANCE_ID} ${ENCRYPTED_RESTORE_DATABASE_ID} ${ENCRYPTED_BACKUP_ID} ${PROJECT_ID} ${key.name}` + ); + assert.match(output, /Database restored from backup./); + assert.match( + output, + new RegExp( + `Database (.+) was restored to ${ENCRYPTED_RESTORE_DATABASE_ID} from backup ` + + `(.+)${ENCRYPTED_BACKUP_ID} using encryption key ${key.name}` + ) + ); + }); + + // list_database_operations + it('should list database operations in the instance', async () => { + const output = execSync( + `${backupsCmd} getDatabaseOperations ${INSTANCE_ID} ${PROJECT_ID}` + ); + assert.match(output, /Optimize Database Operations:/); + assert.match( + output, + new RegExp( + `Database (.+)${RESTORE_DATABASE_ID} restored from backup is (\\d+)% ` + + 'optimized' + ) + ); + }); + + // delete_backup + it('should delete a backup', async () => { + function sleep(timeMillis) { + return new Promise(resolve => setTimeout(resolve, timeMillis)); + } + + // Wait for database to finish optimizing - cannot delete a backup if a database restored from it + const instance = spanner.instance(INSTANCE_ID); + const database = instance.database(RESTORE_DATABASE_ID); + while ((await database.getState()) === 'READY_OPTIMIZING') { + await sleep(1000); + } + + const output = execSync( + `${backupsCmd} deleteBackup ${INSTANCE_ID} ${RESTORE_DATABASE_ID} ${BACKUP_ID} ${PROJECT_ID}` + ); + assert.match(output, /Backup deleted./); + }); + + // custom_timeout_and_retry + it('should insert with custom timeout and retry settings', async () => { + const output = execSync( + `${dmlCmd} insertWithCustomTimeoutAndRetrySettings ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, /record inserted./); + }); + + // get_commit_stats + it('should update rows in Albums example table and return CommitStats', async () => { + const output = execSync( + `${crudCmd} getCommitStats ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, new RegExp('Updated data with (\\d+) mutations')); + }); + + // create_database_with_version_retention_period + it('should create a database with a version retention period', async () => { + const output = execSync( + `${schemaCmd} createDatabaseWithVersionRetentionPeriod "${INSTANCE_ID}" "${VERSION_RETENTION_DATABASE_ID}" ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp( + `Waiting for operation on ${VERSION_RETENTION_DATABASE_ID} to complete...` + ) + ); + assert.match( + output, + new RegExp( + `Created database ${VERSION_RETENTION_DATABASE_ID} with version retention period.` + ) + ); + assert.include(output, 'Version retention period: 1d'); + assert.include(output, 'Earliest version time:'); + }); + + it('should create a table with foreign key delete cascade', async () => { + const output = execSync( + `${createTableWithForeignKeyDeleteCascadeCommand} "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`) + ); + assert.match( + output, + new RegExp( + 'Created Customers and ShoppingCarts table with FKShoppingCartsCustomerId' + ) + ); + }); + + it('should alter a table with foreign key delete cascade', async () => { + const output = execSync( + `${alterTableWithForeignKeyDeleteCascadeCommand} "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`) + 
); + assert.match( + output, + new RegExp('Altered ShoppingCarts table with FKShoppingCartsCustomerName') + ); + }); + + it('should drop a foreign key constraint delete cascade', async () => { + const output = execSync( + `${dropForeignKeyConstraintDeleteCascaseCommand} "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`) + ); + assert.match( + output, + new RegExp( + 'Altered ShoppingCarts table to drop FKShoppingCartsCustomerName' + ) + ); + }); + + describe('sequence', () => { + before(async () => { + const instance = spanner.instance(INSTANCE_ID); + const database = instance.database(SEQUENCE_DATABASE_ID); + const [, operation_seq] = await database.create(); + await operation_seq.promise(); + }); + + after(async () => { + await spanner + .instance(INSTANCE_ID) + .database(SEQUENCE_DATABASE_ID) + .delete(); + }); + + // create_sequence + it('should create a sequence', async () => { + const output = execSync( + `node archived/sequence-create.js "${INSTANCE_ID}" "${SEQUENCE_DATABASE_ID}" ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp('Created Seq sequence and Customers table') + ); + assert.match( + output, + new RegExp('Number of customer records inserted is: 3') + ); + }); + + // alter_sequence + it('should alter a sequence', async () => { + const output = execSync( + `node archived/sequence-alter.js "${INSTANCE_ID}" "${SEQUENCE_DATABASE_ID}" ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp( + 'Altered Seq sequence to skip an inclusive range between 1000 and 5000000.' + ) + ); + assert.match( + output, + new RegExp('Number of customer records inserted is: 3') + ); + }); + + // drop_sequence + it('should drop a sequence', async () => { + const output = execSync( + `node archived/sequence-drop.js "${INSTANCE_ID}" "${SEQUENCE_DATABASE_ID}" ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp( + 'Altered Customers table to drop DEFAULT from CustomerId column and dropped the Seq sequence.' 
+ ) + ); + }); + }); + + describe('leader options', () => { + before(async () => { + const instance = spanner.instance(SAMPLE_INSTANCE_ID); + const [, operation] = await instance.create({ + config: 'nam6', + nodes: 1, + displayName: 'Multi-region options test', + labels: { + ['cloud_spanner_samples']: 'true', + created: Math.round(Date.now() / 1000).toString(), // current time + }, + }); + await operation.promise(); + }); + + after(async () => { + const instance = spanner.instance(SAMPLE_INSTANCE_ID); + await instance.delete(); + }); + + // create_instance_config + it('should create an example custom instance config', async () => { + const output = execSync( + `node archived/instance-config-create.js ${SAMPLE_INSTANCE_CONFIG_ID} ${BASE_INSTANCE_CONFIG_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp( + `Waiting for create operation for ${SAMPLE_INSTANCE_CONFIG_ID} to complete...` + ) + ); + assert.match( + output, + new RegExp(`Created instance config ${SAMPLE_INSTANCE_CONFIG_ID}.`) + ); + }); + + // update_instance_config + it('should update an example custom instance config', async () => { + const output = execSync( + `node archived/instance-config-update.js ${SAMPLE_INSTANCE_CONFIG_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp( + `Waiting for update operation for ${SAMPLE_INSTANCE_CONFIG_ID} to complete...` + ) + ); + assert.match( + output, + new RegExp(`Updated instance config ${SAMPLE_INSTANCE_CONFIG_ID}.`) + ); + }); + + // delete_instance_config + it('should delete an example custom instance config', async () => { + const output = execSync( + `node archived/instance-config-delete.js ${SAMPLE_INSTANCE_CONFIG_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp(`Deleting ${SAMPLE_INSTANCE_CONFIG_ID}...`) + ); + assert.match( + output, + new RegExp(`Deleted instance config ${SAMPLE_INSTANCE_CONFIG_ID}.`) + ); + }); + + // list_instance_config_operations + it('should list all instance config operations', async () => { + const output = execSync( + `node archived/instance-config-get-operations.js ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp( + `Available instance config operations for project ${PROJECT_ID}:` + ) + ); + assert.include(output, 'Instance config operation for'); + assert.include( + output, + 'type.googleapis.com/google.spanner.admin.instance.v1.CreateInstanceConfigMetadata' + ); + }); + + // list_instance_configs + it('should list available instance configs', async () => { + const output = execSync( + `node archived/list-instance-configs.js ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp(`Available instance configs for project ${PROJECT_ID}:`) + ); + assert.include(output, 'Available leader options for instance config'); + }); + + // get_instance_config + // TODO: Enable when the feature has been released. 
+ it.skip('should get a specific instance config', async () => { + const output = execSync( + `node archived/get-instance-config.js ${PROJECT_ID}` + ); + assert.include(output, 'Available leader options for instance config'); + }); + + // create_database_with_default_leader + it('should create a database with a default leader', async () => { + const output = execSync( + `node archived/database-create-with-default-leader.js "${SAMPLE_INSTANCE_ID}" "${DEFAULT_LEADER_DATABASE_ID}" "${DEFAULT_LEADER}" ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp( + `Waiting for creation of ${DEFAULT_LEADER_DATABASE_ID} to complete...` + ) + ); + assert.match( + output, + new RegExp( + `Created database ${DEFAULT_LEADER_DATABASE_ID} with default leader ${DEFAULT_LEADER}.` + ) + ); + }); + + // update_database_with_default_leader + it('should update a database with a default leader', async () => { + const output = execSync( + `node archived/database-update-default-leader.js "${SAMPLE_INSTANCE_ID}" "${DEFAULT_LEADER_DATABASE_ID}" "${DEFAULT_LEADER_2}" ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp( + `Waiting for updating of ${DEFAULT_LEADER_DATABASE_ID} to complete...` + ) + ); + assert.match( + output, + new RegExp( + `Updated database ${DEFAULT_LEADER_DATABASE_ID} with default leader ${DEFAULT_LEADER_2}.` + ) + ); + }); + + // get_default_leader + it('should get the default leader option of a database', async () => { + const output = execSync( + `node archived/database-get-default-leader.js "${SAMPLE_INSTANCE_ID}" "${DEFAULT_LEADER_DATABASE_ID}" ${PROJECT_ID}` + ); + assert.include( + output, + `The default_leader for ${DEFAULT_LEADER_DATABASE_ID} is ${DEFAULT_LEADER_2}` + ); + }); + + // list_databases + it('should list databases on the instance', async () => { + const output = execSync( + `node archived/list-databases.js "${SAMPLE_INSTANCE_ID}" ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp( + `Databases for projects/${PROJECT_ID}/instances/${SAMPLE_INSTANCE_ID}:` + ) + ); + assert.include(output, `(default leader = ${DEFAULT_LEADER_2}`); + }); + + // get_database_ddl + it('should get the ddl of a database', async () => { + const output = execSync( + `node archived/database-get-ddl.js "${SAMPLE_INSTANCE_ID}" "${DEFAULT_LEADER_DATABASE_ID}" ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp( + `Retrieved database DDL for projects/${PROJECT_ID}/instances/${SAMPLE_INSTANCE_ID}/databases/${DEFAULT_LEADER_DATABASE_ID}:` + ) + ); + assert.include(output, 'CREATE TABLE Singers'); + }); + }); + + describe('postgreSQL', () => { + before(async () => { + const instance = spanner.instance(SAMPLE_INSTANCE_ID); + const [, operation] = await instance.create({ + config: PG_LOCATION_ID, + nodes: 1, + displayName: 'PostgreSQL Test', + labels: { + ['cloud_spanner_samples']: 'true', + created: Math.round(Date.now() / 1000).toString(), // current time + }, + }); + await operation.promise(); + }); + + after(async () => { + const instance = spanner.instance(SAMPLE_INSTANCE_ID); + await instance.delete(); + }); + + // create_pg_database + it('should create an example PostgreSQL database', async () => { + const output = execSync( + `node archived/pg-database-create.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp(`Waiting for operation on ${PG_DATABASE_ID} to complete...`) + ); + assert.match( + output, + new RegExp( + `Created database ${PG_DATABASE_ID} on instance ${SAMPLE_INSTANCE_ID} with dialect POSTGRESQL.` + ) + ); + }); + + // 
pg_interleaving + it('should create an interleaved table hierarchy using PostgreSQL dialect', async () => { + const output = execSync( + `node archived/pg-interleaving.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp(`Waiting for operation on ${PG_DATABASE_ID} to complete...`) + ); + assert.match( + output, + new RegExp( + `Created an interleaved table hierarchy in database ${PG_DATABASE_ID} using PostgreSQL dialect.` + ) + ); + }); + + // pg_dml_with_parameter + it('should execute a DML statement with parameters on a Spanner PostgreSQL database', async () => { + const output = execSync( + `node pg-dml-with-parameter.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp('Successfully executed 1 postgreSQL statements using DML') + ); + }); + + // pg_dml_batch + it('should execute a batch of DML statements on a Spanner PostgreSQL database', async () => { + const output = execSync( + `node pg-dml-batch.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp( + 'Successfully executed 3 postgreSQL statements using Batch DML.' + ) + ); + }); + + // pg_dml_partitioned + it('should execute a partitioned DML on a Spanner PostgreSQL database', async () => { + const output = execSync( + `node pg-dml-partitioned.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, new RegExp('Successfully deleted 1 record.')); + }); + + // pg_query_with_parameters + it('should execute a query with parameters on a Spanner PostgreSQL database.', async () => { + const output = execSync( + `node pg-query-parameter.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp('SingerId: 1, FirstName: Alice, LastName: Henderson') + ); + }); + + // pg_dml_update + it('should update a table using parameterized queries on a Spanner PostgreSQL database.', async () => { + const output = execSync( + `node pg-dml-getting-started-update.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp('Successfully updated 1 record in the Singers table.') + ); + }); + + // pg_add_column + it('should add a column to a table in the Spanner PostgreSQL database.', async () => { + const output = execSync( + `node archived/pg-add-column.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp( + `Added MarketingBudget column to Albums table in database ${PG_DATABASE_ID}` + ) + ); + }); + + //pg_create_index + it('should create an index in the Spanner PostgreSQL database.', async () => { + const output = execSync( + `node archived/pg-index-create-storing.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, new RegExp('Added the AlbumsByAlbumTitle index.')); + }); + + // pg_schema_information + it('should query the information schema metadata in a Spanner PostgreSQL database', async () => { + const output = execSync( + `node pg-schema-information.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, new RegExp('Table: public.albums')); + assert.match(output, new RegExp('Table: public.author')); + assert.match(output, new RegExp('Table: public.book')); + assert.match(output, new RegExp('Table: public.singers')); + }); + + // pg_ordering_nulls + it('should order nulls as per clause in a Spanner PostgreSQL database', async () => { + const output = execSync( + `node 
pg-ordering-nulls.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, new RegExp('Author ORDER BY FirstName')); + assert.match(output, new RegExp('Author ORDER BY FirstName DESC')); + assert.match(output, new RegExp('Author ORDER BY FirstName NULLS FIRST')); + assert.match( + output, + new RegExp('Author ORDER BY FirstName DESC NULLS LAST') + ); + }); + + // pg_numeric_data_type + it('should create a table, insert and query pg numeric data', async () => { + const output = execSync( + `node pg-numeric-data-type.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp(`Waiting for operation on ${PG_DATABASE_ID} to complete...`) + ); + assert.match( + output, + new RegExp(`Added table venues to database ${PG_DATABASE_ID}.`) + ); + assert.match(output, new RegExp('Inserted data.')); + assert.match(output, new RegExp('VenueId: 4, Revenue: 97372.3863')); + assert.match(output, new RegExp('VenueId: 19, Revenue: 7629')); + assert.match(output, new RegExp('VenueId: 398, Revenue: 0.000000123')); + }); + + // pg_jsonb_add_column + it('should add a jsonb column to a table', async () => { + const output = execSync( + `node archived/pg-jsonb-add-column.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp(`Waiting for operation on ${PG_DATABASE_ID} to complete...`) + ); + assert.match( + output, + new RegExp( + `Added jsonb column to table venues to database ${PG_DATABASE_ID}.` + ) + ); + }); + + // pg_jsonb_insert_data + it('should insert pg jsonb data', async () => { + const output = execSync( + `node pg-jsonb-update-data.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, new RegExp('Updated data.')); + }); + + // pg_jsonb_query_data + it('should query pg jsonb data', async () => { + const output = execSync( + `node pg-jsonb-query-parameter.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp('VenueId: 19, Details: {"value":{"open":true,"rating":9}}') + ); + }); + + // pg_case_sensitivity + it('should create case sensitive table and query the information in a Spanner PostgreSQL database', async () => { + const output = execSync( + `node pg-case-sensitivity.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp( + `Created table with case sensitive names in database ${PG_DATABASE_ID} using PostgreSQL dialect.` + ) + ); + assert.match(output, new RegExp('Inserted data using mutations.')); + assert.match(output, new RegExp('Concerts Table Data using Mutations:')); + assert.match(output, new RegExp('Concerts Table Data using Aliases:')); + assert.match(output, new RegExp('Inserted data using DML.')); + }); + + // pg_datatypes_casting + it('should use cast operator to cast from one data type to another in a Spanner PostgreSQL database', async () => { + const output = execSync( + `node pg-datatypes-casting.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, new RegExp('Data types after casting')); + }); + + // pg_functions + it('should call a server side function on a Spanner PostgreSQL database.', async () => { + const output = execSync( + `node pg-functions.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, new RegExp('1284352323 seconds after epoch is')); + }); + + // pg_dml_returning_insert + it('should insert records using DML Returning in a Spanner PostgreSQL database', async () 
=> { + const output = execSync( + `node pg-dml-returning-insert ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp('Successfully inserted 1 record into the Singers table') + ); + assert.match(output, new RegExp('Virginia Watson')); + }); + + // pg_dml_returning_update + it('should update records using DML Returning in a Spanner PostgreSQL database', async () => { + const output = execSync( + `node pg-dml-returning-update ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp('Successfully updated 1 record into the Singers table') + ); + assert.match(output, new RegExp('Virginia1 Watson1')); + }); + + // pg_dml_returning_delete + it('should delete records using DML Returning in a Spanner PostgreSQL database', async () => { + const output = execSync( + `node pg-dml-returning-delete ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp('Successfully deleted 1 record from the Singers table') + ); + assert.match(output, new RegExp('Virginia1 Watson1')); + }); + + // pg_create_sequence + it('should create a sequence', async () => { + const output = execSync( + `node archived/pg-sequence-create.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp('Created Seq sequence and Customers table') + ); + assert.match( + output, + new RegExp('Number of customer records inserted is: 3') + ); + }); + + // pg_alter_sequence + it('should alter a sequence', async () => { + const output = execSync( + `node archived/pg-sequence-alter.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp( + 'Altered Seq sequence to skip an inclusive range between 1000 and 5000000.' + ) + ); + assert.match( + output, + new RegExp('Number of customer records inserted is: 3') + ); + }); + + // pg_drop_sequence + it('should drop a sequence', async () => { + const output = execSync( + `node archived/pg-sequence-drop.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp( + 'Altered Customers table to drop DEFAULT from CustomerId column and dropped the Seq sequence.' + ) + ); + }); + + // directed_read_options + it('should run read-only transaction with directed read options set', async () => { + const output = execSync( + `node directed-reads.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + ); + console.log(output); + assert.match( + output, + new RegExp( + 'SingerId: 2, AlbumId: 2, AlbumTitle: Forever Hold your Peace' + ) + ); + assert.match( + output, + new RegExp( + 'Successfully executed read-only transaction with directedReadOptions' + ) + ); + }); + }); +}); diff --git a/samples/system-test/spanner.test.js b/samples/system-test/spanner.test.js index 04645c9ff..c4bf903fa 100644 --- a/samples/system-test/spanner.test.js +++ b/samples/system-test/spanner.test.js @@ -1,4 +1,4 @@ -// Copyright 2017 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -15,34 +15,19 @@ 'use strict'; const {Spanner} = require('@google-cloud/spanner'); +const pLimit = require('p-limit'); +const {describe, it, before, after, afterEach} = require('mocha'); const {KeyManagementServiceClient} = require('@google-cloud/kms'); const {assert} = require('chai'); -const {describe, it, before, after, afterEach} = require('mocha'); const cp = require('child_process'); -const pLimit = require('p-limit'); const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); - -const batchCmd = 'node batch.js'; -const crudCmd = 'node crud.js'; +const instanceCmd = 'node instance.js'; const schemaCmd = 'node schema.js'; -const queryOptionsCmd = 'node queryoptions.js'; -const rpcPriorityRunCommand = 'node rpc-priority-run.js'; -const rpcPriorityReadCommand = 'node rpc-priority-read.js'; -const rpcPriorityBatchDMLCommand = 'node rpc-priority-batch-dml.js'; -const rpcPriorityPartitionedDMLCommand = 'node rpc-priority-partitioned-dml.js'; -const rpcPriorityTransactionCommand = 'node rpc-priority-transaction.js'; -const rpcPriorityQueryPartitionsCommand = - 'node rpc-priority-query-partitions.js'; -const transactionCmd = 'node transaction.js'; -const transactionTagCommand = 'node transaction-tag.js'; -const requestTagCommand = 'node request-tag.js'; -const timestampCmd = 'node timestamp.js'; -const structCmd = 'node struct.js'; -const dmlCmd = 'node dml.js'; -const datatypesCmd = 'node datatypes.js'; const backupsCmd = 'node backups.js'; -const instanceCmd = 'node instance.js'; +const crudCmd = 'node crud.js'; +const datatypesCmd = 'node datatypes.js'; +const timestampCmd = 'node timestamp.js'; const createTableWithForeignKeyDeleteCascadeCommand = 'node table-create-with-foreign-key-delete-cascade.js'; const alterTableWithForeignKeyDeleteCascadeCommand = @@ -53,20 +38,20 @@ const dropForeignKeyConstraintDeleteCascaseCommand = const CURRENT_TIME = Math.round(Date.now() / 1000).toString(); const PROJECT_ID = process.env.GCLOUD_PROJECT; const PREFIX = 'test-instance'; +const SAMPLE_INSTANCE_ID = `${PREFIX}-my-sample-instance-${CURRENT_TIME}`; const INSTANCE_ID = process.env.SPANNERTEST_INSTANCE || `${PREFIX}-${CURRENT_TIME}`; -const SAMPLE_INSTANCE_ID = `${PREFIX}-my-sample-instance-${CURRENT_TIME}`; const SAMPLE_INSTANCE_CONFIG_ID = `custom-my-sample-instance-config-${CURRENT_TIME}`; -const BASE_INSTANCE_CONFIG_ID = 'regional-us-west2'; const INSTANCE_ALREADY_EXISTS = !!process.env.SPANNERTEST_INSTANCE; +const BASE_INSTANCE_CONFIG_ID = 'regional-us-central1'; const DATABASE_ID = `test-database-${CURRENT_TIME}`; -const PG_DATABASE_ID = `test-pg-database-${CURRENT_TIME}`; -const RESTORE_DATABASE_ID = `test-database-${CURRENT_TIME}-r`; -const ENCRYPTED_RESTORE_DATABASE_ID = `test-database-${CURRENT_TIME}-r-enc`; -const VERSION_RETENTION_DATABASE_ID = `test-database-${CURRENT_TIME}-v`; const ENCRYPTED_DATABASE_ID = `test-database-${CURRENT_TIME}-enc`; const DEFAULT_LEADER_DATABASE_ID = `test-database-${CURRENT_TIME}-dl`; +const VERSION_RETENTION_DATABASE_ID = `test-database-${CURRENT_TIME}-v`; const SEQUENCE_DATABASE_ID = `test-seq-database-${CURRENT_TIME}-r`; +const PG_DATABASE_ID = `test-pg-database-${CURRENT_TIME}`; +const RESTORE_DATABASE_ID = `test-database-${CURRENT_TIME}-r`; +const ENCRYPTED_RESTORE_DATABASE_ID = `test-database-${CURRENT_TIME}-r-enc`; const BACKUP_ID = `test-backup-${CURRENT_TIME}`; const COPY_BACKUP_ID = `test-copy-backup-${CURRENT_TIME}`; const ENCRYPTED_BACKUP_ID = `test-backup-${CURRENT_TIME}-enc`; @@ -136,7 +121,6 @@ async function deleteInstance(instance) { 
await Promise.all(backups.map(backup => backup.delete(GAX_OPTIONS))); return instance.delete(GAX_OPTIONS); } - async function getCryptoKey() { const NOT_FOUND = 5; @@ -199,8 +183,7 @@ async function getCryptoKey() { } } } - -describe('Spanner', () => { +describe('Autogenerated Admin Clients', () => { const instance = spanner.instance(INSTANCE_ID); before(async () => { @@ -250,7 +233,6 @@ describe('Spanner', () => { } await spanner.instance(SAMPLE_INSTANCE_ID).delete(GAX_OPTIONS); }); - describe('instance', () => { afterEach(async () => { const sample_instance = spanner.instance(SAMPLE_INSTANCE_ID); @@ -309,11 +291,11 @@ describe('Spanner', () => { // create_database it('should create an example database', async () => { const output = execSync( - `${schemaCmd} createDatabase "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}` + `${schemaCmd} createDatabase "${INSTANCE_ID}" "${DATABASE_ID}" "${PROJECT_ID}"` ); assert.match( output, - new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`) + new RegExp(`Waiting for creation of ${DATABASE_ID} to complete...`) ); assert.match( output, @@ -352,7 +334,7 @@ describe('Spanner', () => { const key = await getCryptoKey(); const output = execSync( - `${schemaCmd} createDatabaseWithEncryptionKey "${INSTANCE_ID}" "${ENCRYPTED_DATABASE_ID}" ${PROJECT_ID} "${key.name}"` + `${schemaCmd} createDatabaseWithEncryptionKey "${INSTANCE_ID}" "${ENCRYPTED_DATABASE_ID}" "${PROJECT_ID}" "${key.name}"` ); assert.match( output, @@ -373,14 +355,160 @@ describe('Spanner', () => { }); }); - describe('quickstart', () => { - // Running the quickstart test in here since there's already a spanner - // instance and database set up at this point. - it('should query a table', async () => { + describe('postgreSQL', () => { + before(async () => { + const instance = spanner.instance(SAMPLE_INSTANCE_ID); + const [, operation] = await instance.create({ + config: PG_LOCATION_ID, + nodes: 1, + displayName: 'PostgreSQL Test', + labels: { + ['cloud_spanner_samples']: 'true', + created: Math.round(Date.now() / 1000).toString(), // current time + }, + }); + await operation.promise(); + }); + + after(async () => { + const instance = spanner.instance(SAMPLE_INSTANCE_ID); + await instance.delete(); + }); + + // create_pg_database + it('should create an example PostgreSQL database', async () => { + const output = execSync( + `node pg-database-create.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp(`Waiting for operation on ${PG_DATABASE_ID} to complete...`) + ); + assert.match( + output, + new RegExp( + `Created database ${PG_DATABASE_ID} on instance ${SAMPLE_INSTANCE_ID} with dialect POSTGRESQL.` + ) + ); + }); + + // pg_interleaving + it('should create an interleaved table hierarchy using PostgreSQL dialect', async () => { + const output = execSync( + `node pg-interleaving.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp(`Waiting for operation on ${PG_DATABASE_ID} to complete...`) + ); + assert.match( + output, + new RegExp( + `Created an interleaved table hierarchy in database ${PG_DATABASE_ID} using PostgreSQL dialect.` + ) + ); + }); + + // pg_add_column + it('should add a column to a table in the Spanner PostgreSQL database.', async () => { + const output = execSync( + `node pg-add-column.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp( + `Added MarketingBudget column to Albums table in database ${PG_DATABASE_ID}` + ) + 
); + }); + + //pg_create_index + it('should create an index in the Spanner PostgreSQL database.', async () => { + const output = execSync( + `node pg-index-create-storing.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match(output, new RegExp('Added the AlbumsByAlbumTitle index.')); + }); + + // pg_numeric_data_type + it('should create a table, insert and query pg numeric data', async () => { + const output = execSync( + `node pg-numeric-data-type.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp(`Waiting for operation on ${PG_DATABASE_ID} to complete...`) + ); + assert.match( + output, + new RegExp(`Added table venues to database ${PG_DATABASE_ID}.`) + ); + assert.match(output, new RegExp('Inserted data.')); + assert.match(output, new RegExp('VenueId: 4, Revenue: 97372.3863')); + assert.match(output, new RegExp('VenueId: 19, Revenue: 7629')); + assert.match(output, new RegExp('VenueId: 398, Revenue: 0.000000123')); + }); + + // pg_jsonb_add_column + it('should add a jsonb column to a table', async () => { + const output = execSync( + `node pg-jsonb-add-column.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp(`Waiting for operation on ${PG_DATABASE_ID} to complete...`) + ); + assert.match( + output, + new RegExp( + `Added jsonb column to table venues to database ${PG_DATABASE_ID}.` + ) + ); + }); + + // pg_create_sequence + it('should create a sequence', async () => { + const output = execSync( + `node pg-sequence-create.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp('Created Seq sequence and Customers table') + ); + assert.match( + output, + new RegExp('Number of customer records inserted is: 3') + ); + }); + + // pg_alter_sequence + it('should alter a sequence', async () => { + const output = execSync( + `node pg-sequence-alter.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp( + 'Altered Seq sequence to skip an inclusive range between 1000 and 5000000.' + ) + ); + assert.match( + output, + new RegExp('Number of customer records inserted is: 3') + ); + }); + + // pg_drop_sequence + it('should drop a sequence', async () => { const output = execSync( - `node quickstart ${PROJECT_ID} ${INSTANCE_ID} ${DATABASE_ID}` + `node pg-sequence-drop.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + ); + assert.match( + output, + new RegExp( + 'Altered Customers table to drop DEFAULT from CustomerId column and dropped the Seq sequence.' 
+ ) ); - assert.match(output, /Query: \d+ found./); }); }); @@ -392,36 +520,6 @@ describe('Spanner', () => { assert.match(output, /Inserted data\./); }); - // delete_data - it('should delete and then insert rows in the example tables', async () => { - let output = execSync( - `${crudCmd} delete ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.include(output, 'Deleted individual rows in Albums.'); - assert.include(output, '2 records deleted from Singers.'); - assert.include(output, '3 records deleted from Singers.'); - output = execSync( - `${crudCmd} insert ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /Inserted data\./); - }); - - // query_data - it('should query an example table and return matching rows', async () => { - const output = execSync( - `${crudCmd} query ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk/); - }); - - // read_data - it('should read an example table', async () => { - const output = execSync( - `${crudCmd} read ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk/); - }); - // add_column it('should add a column to a table', async () => { const output = execSync( @@ -431,41 +529,6 @@ describe('Spanner', () => { assert.match(output, /Added the MarketingBudget column\./); }); - // update_data - it('should update existing rows in an example table', async () => { - const output = execSync( - `${crudCmd} update ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /Updated data\./); - }); - - // read_stale_data - it('should read stale data from an example table', async () => { - // read-stale-data reads data that is exactly 15 seconds old. So, make sure - // 15 seconds have elapsed since the update_data test. 
- await new Promise(r => setTimeout(r, 16000)); - const output = execSync( - `${crudCmd} read-stale ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match( - output, - /SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk, MarketingBudget: 100000/ - ); - assert.match( - output, - /SingerId: 2, AlbumId: 2, AlbumTitle: Forever Hold your Peace, MarketingBudget: 500000/ - ); - }); - - // query_data_with_new_column - it('should query an example table with an additional column and return matching rows', async () => { - const output = execSync( - `${schemaCmd} queryNewColumn ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /SingerId: 1, AlbumId: 1, MarketingBudget: 100000/); - assert.match(output, /SingerId: 2, AlbumId: 2, MarketingBudget: 500000/); - }); - // create_index it('should create an index in an example table', async () => { const output = execSync( @@ -488,254 +551,61 @@ describe('Spanner', () => { assert.match(output, /Added the AlbumsByAlbumTitle2 index\./); }); - // query_data_with_index - it('should query an example table with an index and return matching rows', async () => { - const output = execSync( - `node index-query-data ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match( - output, - /AlbumId: 2, AlbumTitle: Go, Go, Go, MarketingBudget:/ - ); - assert.notMatch( - output, - /AlbumId: 1, AlbumTitle: Total Junk, MarketingBudget:/ - ); - }); - - it('should respect query boundaries when querying an example table with an index', async () => { + // add_timestamp_column + it('should add a timestamp column to a table', async () => { const output = execSync( - `node index-query-data ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID} "Ardvark" "Zoo"` - ); - assert.match( - output, - /AlbumId: 1, AlbumTitle: Total Junk, MarketingBudget:/ + `${timestampCmd} addTimestampColumn ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` ); + assert.match(output, /Waiting for operation to complete\.\.\./); assert.match( output, - /AlbumId: 2, AlbumTitle: Go, Go, Go, MarketingBudget:/ - ); - }); - - // read_data_with_index - it('should read an example table with an index', async () => { - const output = execSync( - `node index-read-data ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + /Added LastUpdateTime as a commit timestamp column in Albums table\./ ); - assert.match(output, /AlbumId: 1, AlbumTitle: Total Junk/); }); - // read_data_with_storing_index - it('should read an example table with a storing index', async () => { + // update_data_with_timestamp_column + it('should update existing rows in an example table with commit timestamp column', async () => { const output = execSync( - `node index-read-data-with-storing ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${timestampCmd} updateWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` ); - assert.match(output, /AlbumId: 1, AlbumTitle: Total Junk/); + assert.match(output, /Updated data\./); }); - // spanner_create_client_with_query_options - it('should use query options from a database reference', async () => { + // query_data_with_timestamp_column + it('should query an example table with an additional timestamp column and return matching rows', async () => { const output = execSync( - `${queryOptionsCmd} databaseWithQueryOptions ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${timestampCmd} queryWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` ); assert.match( output, - /AlbumId: 2, AlbumTitle: Forever Hold your Peace, MarketingBudget:/ - ); - }); - - // spanner_query_with_query_options - it('should 
use query options on request', async () => { - const output = execSync( - `${queryOptionsCmd} queryWithQueryOptions ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + /SingerId: 1, AlbumId: 1, MarketingBudget: 1000000, LastUpdateTime:/ ); assert.match( output, - /AlbumId: 2, AlbumTitle: Forever Hold your Peace, MarketingBudget:/ + /SingerId: 2, AlbumId: 2, MarketingBudget: 750000, LastUpdateTime:/ ); }); - // query with RPC priority for run command - it('should use RPC priority from request options for run command', async () => { + // create_table_with_timestamp_column + it('should create an example table with a timestamp column', async () => { const output = execSync( - `${rpcPriorityRunCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${timestampCmd} createTableWithTimestamp "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}` ); + assert.match( output, - /Successfully fetched \d rows using low RPC priority\./ + new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`) ); assert.match( output, - /AlbumId: 2, AlbumTitle: Forever Hold your Peace, MarketingBudget:/ + new RegExp(`Created table Performances in database ${DATABASE_ID}.`) ); }); - // query with RPC priority for Read command - it('should use RPC priority from request options for read command', async () => { + // insert_data_with_timestamp + it('should insert rows into an example table with timestamp column', async () => { const output = execSync( - `${rpcPriorityReadCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match( - output, - /Successfully fetched \d rows using low RPC priority\./ - ); - assert.match(output, /SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk/); - }); - - // query with RPC priority for transaction command - it('should use RPC priority from request options for transaction command', async () => { - const output = execSync( - `${rpcPriorityTransactionCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match( - output, - /Successfully inserted 1 record into the Singers table using low RPC priority\./ - ); - }); - - // query with RPC priority for batch DML command - it('should use RPC priority from request options for batch DML command', async () => { - const output = execSync( - `${rpcPriorityBatchDMLCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match( - output, - /Successfully executed 2 SQL statements using Batch DML using low RPC priority\./ - ); - }); - - // query with RPC priority for partitioned DML command - it('should use RPC priority from request options for partitioned DML command', async () => { - const output = execSync( - `${rpcPriorityPartitionedDMLCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match( - output, - new RegExp('Successfully updated (\\d+) records using low RPC priority.') - ); - }); - - // query with RPC priority for Query partitions command - it('should use RPC priority from request options for Query partition command', async () => { - const output = execSync( - `${rpcPriorityQueryPartitionsCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match( - output, - /Successfully created \d query partitions using low RPC priority\./ - ); - assert.match(output, /Successfully received \d from executed partitions\./); - }); - - // read_only_transactioni - it('should read an example table using transactions', async () => { - const output = execSync( - `${transactionCmd} readOnly ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /SingerId: 1, AlbumId: 1, AlbumTitle: 
Total Junk/); - assert.match(output, /Successfully executed read-only transaction\./); - }); - - // read_write_transaction - it('should read from and write to an example table using transactions', async () => { - let output = execSync( - `${transactionCmd} readWrite ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /The first album's marketing budget: 100000/); - assert.match(output, /The second album's marketing budget: 500000/); - assert.match( - output, - /Successfully executed read-write transaction to transfer 200000 from Album 2 to Album 1./ - ); - output = execSync( - `${schemaCmd} queryNewColumn ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /SingerId: 1, AlbumId: 1, MarketingBudget: 300000/); - assert.match(output, /SingerId: 2, AlbumId: 2, MarketingBudget: 300000/); - }); - - // batch_client - it('should create and execute query partitions', async () => { - const output = execSync( - `${batchCmd} create-and-execute-query-partitions ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /Successfully created \d query partitions\./); - assert.match(output, /Successfully received \d from executed partitions\./); - }); - - // execute_partition - it('should execute a partition', async () => { - const instance = spanner.instance(INSTANCE_ID); - const database = instance.database(DATABASE_ID); - const [transaction] = await database.createBatchTransaction(); - const identifier = JSON.stringify(transaction.identifier()); - - const query = 'SELECT SingerId FROM Albums'; - const [partitions] = await transaction.createQueryPartitions(query); - const partition = JSON.stringify(partitions[0]); - - const output = execSync( - `${batchCmd} execute-partition ${INSTANCE_ID} ${DATABASE_ID} '${identifier}' '${partition}' ${PROJECT_ID}` - ); - assert.match(output, /Successfully received \d from executed partition\./); - await transaction.close(); - }); - - // add_timestamp_column - it('should add a timestamp column to a table', async () => { - const output = execSync( - `${timestampCmd} addTimestampColumn ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /Waiting for operation to complete\.\.\./); - assert.match( - output, - /Added LastUpdateTime as a commit timestamp column in Albums table\./ - ); - }); - - // update_data_with_timestamp_column - it('should update existing rows in an example table with commit timestamp column', async () => { - const output = execSync( - `${timestampCmd} updateWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /Updated data\./); - }); - - // query_data_with_timestamp_column - it('should query an example table with an additional timestamp column and return matching rows', async () => { - const output = execSync( - `${timestampCmd} queryWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match( - output, - /SingerId: 1, AlbumId: 1, MarketingBudget: 1000000, LastUpdateTime:/ - ); - assert.match( - output, - /SingerId: 2, AlbumId: 2, MarketingBudget: 750000, LastUpdateTime:/ - ); - }); - - // create_table_with_timestamp_column - it('should create an example table with a timestamp column', async () => { - const output = execSync( - `${timestampCmd} createTableWithTimestamp "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}` - ); - - assert.match( - output, - new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`) - ); - assert.match( - output, - new RegExp(`Created table Performances in database ${DATABASE_ID}.`) - ); - 
}); - - // insert_data_with_timestamp - it('should insert rows into an example table with timestamp column', async () => { - const output = execSync( - `${timestampCmd} insertWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${timestampCmd} insertWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` ); assert.match(output, /Inserted data\./); }); @@ -749,190 +619,6 @@ describe('Spanner', () => { assert.match(output, /Revenue: 15000, LastUpdateTime:/); }); - // write_data_for_struct_queries - it('should insert rows into an example table for use with struct query examples', async () => { - const output = execSync( - `${structCmd} writeDataForStructQueries ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /Inserted data\./); - }); - - // query_with_struct_param - it('should query an example table with a STRUCT param', async () => { - const output = execSync( - `${structCmd} queryDataWithStruct ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /SingerId: 6/); - }); - - // query_with_array_of_struct_param - it('should query an example table with an array of STRUCT param', async () => { - const output = execSync( - `${structCmd} queryWithArrayOfStruct ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /SingerId: 6\nSingerId: 7\nSingerId: 8/); - }); - - // query_with_struct_field_param - it('should query an example table with a STRUCT field param', async () => { - const output = execSync( - `${structCmd} queryStructField ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /SingerId: 6/); - }); - - // query_with_nested_struct_param - it('should query an example table with a nested STRUCT param', async () => { - const output = execSync( - `${structCmd} queryNestedStructField ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match( - output, - /SingerId: 6, SongName: Imagination\nSingerId: 9, SongName: Imagination/ - ); - }); - - // dml_standard_insert - it('should insert rows into an example table using a DML statement', async () => { - const output = execSync( - `${dmlCmd} insertUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match( - output, - /Successfully inserted 1 record into the Singers table/ - ); - }); - - // dml_standard_update - it('should update a row in an example table using a DML statement', async () => { - const output = execSync( - `${dmlCmd} updateUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /Successfully updated 1 record/); - }); - - // dml_standard_delete - it('should delete a row from an example table using a DML statement', async () => { - const output = execSync( - `${dmlCmd} deleteUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /Successfully deleted 1 record\./); - }); - - // dml_standard_update_with_timestamp - it('should update the timestamp of multiple records in an example table using a DML statement', async () => { - const output = execSync( - `${dmlCmd} updateUsingDmlWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /Successfully updated 3 records/); - }); - - // dml_write_then_read - it('should insert a record in an example table using a DML statement and then query the record', async () => { - const output = execSync( - `${dmlCmd} writeAndReadUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /Timothy Campbell/); - }); - - // dml_structs - it('should update a record in an example table using a DML 
statement along with a struct value', async () => { - const output = execSync( - `${dmlCmd} updateUsingDmlWithStruct ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /Successfully updated 1 record/); - }); - - // dml_getting_started_insert - it('should insert multiple records into an example table using a DML statement', async () => { - const output = execSync( - `${dmlCmd} writeUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /4 records inserted/); - }); - - // dml_query_with_parameter - it('should use a parameter query to query record that was inserted using a DML statement', async () => { - const output = execSync( - `${dmlCmd} queryWithParameter ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /SingerId: 12, FirstName: Melissa, LastName: Garcia/); - }); - - // dml_getting_started_update - it('should transfer value from one record to another using DML statements within a transaction', async () => { - const output = execSync( - `${dmlCmd} writeWithTransactionUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match( - output, - /Successfully executed read-write transaction using DML to transfer 200000 from Album 2 to Album 1/ - ); - }); - - // dml_partitioned_update - it('should update multiple records using a partitioned DML statement', async () => { - const output = execSync( - `${dmlCmd} updateUsingPartitionedDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /Successfully updated 3 records/); - }); - - // dml_partitioned_delete - it('should delete multiple records using a partitioned DML statement', async () => { - const output = execSync( - `${dmlCmd} deleteUsingPartitionedDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /Successfully deleted 6 records/); - }); - - // dml_batch_update - it('should insert and update records using Batch DML', async () => { - const output = execSync( - `${dmlCmd} updateUsingBatchDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match( - output, - /Successfully executed 2 SQL statements using Batch DML/ - ); - }); - - // dml_returning_insert - it('should insert records using DML Returning', async () => { - const output = execSync( - `node dml-returning-insert ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match( - output, - new RegExp('Successfully inserted 1 record into the Singers table') - ); - assert.match(output, new RegExp('Virginia Watson')); - }); - - // dml_returning_update - it('should update records using DML Returning', async () => { - const output = execSync( - `node dml-returning-update ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match( - output, - new RegExp('Successfully updated 1 record into the Albums table') - ); - assert.match(output, new RegExp('2000000')); - }); - - // dml_returning_delete - it('should delete records using DML Returning', async () => { - const output = execSync( - `node dml-returning-delete ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match( - output, - new RegExp('Successfully deleted 1 record from the Singers table') - ); - assert.match(output, new RegExp('Virginia Watson')); - }); - // create_table_with_datatypes it('should create Venues example table with supported datatype columns', async () => { const output = execSync( @@ -949,105 +635,6 @@ describe('Spanner', () => { ); }); - // insert_datatypes_data - it('should insert multiple records into Venues example table', async () => { - const output = execSync( - 
`${datatypesCmd} insertData ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /Inserted data./); - }); - - // query_with_array_parameter - it('should use an ARRAY query parameter to query record from the Venues example table', async () => { - const output = execSync( - `${datatypesCmd} queryWithArray ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match( - output, - /VenueId: 19, VenueName: Venue 19, AvailableDate: 2020-11-01/ - ); - assert.match( - output, - /VenueId: 42, VenueName: Venue 42, AvailableDate: 2020-10-01/ - ); - }); - - // query_with_bool_parameter - it('should use a BOOL query parameter to query record from the Venues example table', async () => { - const output = execSync( - `${datatypesCmd} queryWithBool ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match( - output, - /VenueId: 19, VenueName: Venue 19, OutdoorVenue: true/ - ); - }); - - // query_with_bytes_parameter - it('should use a BYTES query parameter to query record from the Venues example table', async () => { - const output = execSync( - `${datatypesCmd} queryWithBytes ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /VenueId: 4, VenueName: Venue 4/); - }); - - // query_with_date_parameter - it('should use a DATE query parameter to query record from the Venues example table', async () => { - const output = execSync( - `${datatypesCmd} queryWithDate ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match( - output, - /VenueId: 4, VenueName: Venue 4, LastContactDate: 2018-09-02/ - ); - assert.match( - output, - /VenueId: 42, VenueName: Venue 42, LastContactDate: 2018-10-01/ - ); - }); - - // query_with_float_parameter - it('should use a FLOAT64 query parameter to query record from the Venues example table', async () => { - const output = execSync( - `${datatypesCmd} queryWithFloat ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match( - output, - /VenueId: 4, VenueName: Venue 4, PopularityScore: 0.8/ - ); - assert.match( - output, - /VenueId: 19, VenueName: Venue 19, PopularityScore: 0.9/ - ); - }); - - // query_with_int_parameter - it('should use a INT64 query parameter to query record from the Venues example table', async () => { - const output = execSync( - `${datatypesCmd} queryWithInt ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /VenueId: 19, VenueName: Venue 19, Capacity: 6300/); - assert.match(output, /VenueId: 42, VenueName: Venue 42, Capacity: 3000/); - }); - - // query_with_string_parameter - it('should use a STRING query parameter to query record from the Venues example table', async () => { - const output = execSync( - `${datatypesCmd} queryWithString ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /VenueId: 42, VenueName: Venue 42/); - }); - - // query_with_timestamp_parameter - it('should use a TIMESTAMP query parameter to query record from the Venues example table', async () => { - const output = execSync( - `${datatypesCmd} queryWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /VenueId: 4, VenueName: Venue 4, LastUpdateTime:/); - assert.match(output, /VenueId: 19, VenueName: Venue 19, LastUpdateTime:/); - assert.match(output, /VenueId: 42, VenueName: Venue 42, LastUpdateTime:/); - }); - // add_numeric_column it('should add a Revenue column to Venues example table', async () => { const output = execSync( @@ -1064,38 +651,6 @@ describe('Spanner', () => { ); }); - // update_data_with_numeric - it('should update rows in 
Venues example table to add data in Revenue column', async () => { - const output = execSync( - `${datatypesCmd} updateWithNumericData ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /Updated data./); - }); - - // query_with_numeric_parameter - it('should use a NUMERIC query parameter to query records from the Venues example table', async () => { - const output = execSync( - `${datatypesCmd} queryWithNumericParameter ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /VenueId: 4, Revenue: 35000/); - }); - - // query with request tag - it('should execute a query with a request tag', async () => { - const output = execSync( - `${requestTagCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk/); - }); - - // read_write_transaction with transaction tag - it('should execute a read/write transaction with a transaction tag', async () => { - const output = execSync( - `${transactionTagCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.include(output, 'Inserted new outdoor venue'); - }); - // add_json_column it('should add a VenueDetails column to Venues example table', async () => { const output = execSync( @@ -1112,22 +667,6 @@ describe('Spanner', () => { ); }); - // update_data_with_json - it('should update rows in Venues example table to add data in VenueDetails column', async () => { - const output = execSync( - `${datatypesCmd} updateWithJsonData ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /Updated data./); - }); - - // query_with_json_parameter - it('should use a JSON query parameter to query records from the Venues example table', async () => { - const output = execSync( - `${datatypesCmd} queryWithJsonParameter ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /VenueId: 19, Details: {"open":true,"rating":9}/); - }); - // add_and_drop_new_database_role it('should add and drop new database roles', async () => { const output = execSync( @@ -1144,17 +683,6 @@ describe('Spanner', () => { ); }); - // read_data_with_database_role - it('should read data with database role', async () => { - const output = execSync( - `node read-data-with-database-role.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match( - output, - new RegExp('SingerId: 1, FirstName: Marc, LastName: Richards') - ); - }); - // get_database_roles it('should list database roles', async () => { const output = execSync( @@ -1242,12 +770,16 @@ describe('Spanner', () => { assert.match(output, /Create Backup Operations:/); assert.match( output, - new RegExp(`Backup (.+)${BACKUP_ID} (.+) is 100% complete`) + new RegExp( + `Backup (.+)${BACKUP_ID} on database (.+)${DATABASE_ID} is 100% complete.` + ) ); assert.match(output, /Copy Backup Operations:/); assert.match( output, - new RegExp(`Backup (.+)${COPY_BACKUP_ID} (.+) is 100% complete`) + new RegExp( + `Backup (.+)${COPY_BACKUP_ID} copied from source backup (.+)${BACKUP_ID} is 100% complete` + ) ); }); @@ -1327,32 +859,17 @@ describe('Spanner', () => { // Wait for database to finish optimizing - cannot delete a backup if a database restored from it const instance = spanner.instance(INSTANCE_ID); const database = instance.database(RESTORE_DATABASE_ID); + while ((await database.getState()) === 'READY_OPTIMIZING') { await sleep(1000); } const output = execSync( - `${backupsCmd} deleteBackup ${INSTANCE_ID} ${RESTORE_DATABASE_ID} ${BACKUP_ID} ${PROJECT_ID}` + `${backupsCmd} deleteBackup 
${INSTANCE_ID} ${BACKUP_ID} ${PROJECT_ID}` ); assert.match(output, /Backup deleted./); }); - // custom_timeout_and_retry - it('should insert with custom timeout and retry settings', async () => { - const output = execSync( - `${dmlCmd} insertWithCustomTimeoutAndRetrySettings ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, /record inserted./); - }); - - // get_commit_stats - it('should update rows in Albums example table and return CommitStats', async () => { - const output = execSync( - `${crudCmd} getCommitStats ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, new RegExp('Updated data with (\\d+) mutations')); - }); - // create_database_with_version_retention_period it('should create a database with a version retention period', async () => { const output = execSync( @@ -1429,247 +946,65 @@ describe('Spanner', () => { }); after(async () => { - await spanner - .instance(INSTANCE_ID) - .database(SEQUENCE_DATABASE_ID) - .delete(); - }); - - // create_sequence - it('should create a sequence', async () => { - const output = execSync( - `node sequence-create.js "${INSTANCE_ID}" "${SEQUENCE_DATABASE_ID}" ${PROJECT_ID}` - ); - assert.match( - output, - new RegExp('Created Seq sequence and Customers table') - ); - assert.match( - output, - new RegExp('Number of customer records inserted is: 3') - ); - }); - - // alter_sequence - it('should alter a sequence', async () => { - const output = execSync( - `node sequence-alter.js "${INSTANCE_ID}" "${SEQUENCE_DATABASE_ID}" ${PROJECT_ID}` - ); - assert.match( - output, - new RegExp( - 'Altered Seq sequence to skip an inclusive range between 1000 and 5000000.' - ) - ); - assert.match( - output, - new RegExp('Number of customer records inserted is: 3') - ); - }); - - // drop_sequence - it('should drop a sequence', async () => { - const output = execSync( - `node sequence-drop.js "${INSTANCE_ID}" "${SEQUENCE_DATABASE_ID}" ${PROJECT_ID}` - ); - assert.match( - output, - new RegExp( - 'Altered Customers table to drop DEFAULT from CustomerId column and dropped the Seq sequence.' 
- ) - ); - }); - }); - - describe('leader options', () => { - before(async () => { - const instance = spanner.instance(SAMPLE_INSTANCE_ID); - const [, operation] = await instance.create({ - config: 'nam6', - nodes: 1, - displayName: 'Multi-region options test', - labels: { - ['cloud_spanner_samples']: 'true', - created: Math.round(Date.now() / 1000).toString(), // current time - }, - }); - await operation.promise(); - }); - - after(async () => { - const instance = spanner.instance(SAMPLE_INSTANCE_ID); - await instance.delete(); - }); - - // create_instance_config - it('should create an example custom instance config', async () => { - const output = execSync( - `node instance-config-create.js ${SAMPLE_INSTANCE_CONFIG_ID} ${BASE_INSTANCE_CONFIG_ID} ${PROJECT_ID}` - ); - assert.match( - output, - new RegExp( - `Waiting for create operation for ${SAMPLE_INSTANCE_CONFIG_ID} to complete...` - ) - ); - assert.match( - output, - new RegExp(`Created instance config ${SAMPLE_INSTANCE_CONFIG_ID}.`) - ); - }); - - // update_instance_config - it('should update an example custom instance config', async () => { - const output = execSync( - `node instance-config-update.js ${SAMPLE_INSTANCE_CONFIG_ID} ${PROJECT_ID}` - ); - assert.match( - output, - new RegExp( - `Waiting for update operation for ${SAMPLE_INSTANCE_CONFIG_ID} to complete...` - ) - ); - assert.match( - output, - new RegExp(`Updated instance config ${SAMPLE_INSTANCE_CONFIG_ID}.`) - ); - }); - - // delete_instance_config - it('should delete an example custom instance config', async () => { - const output = execSync( - `node instance-config-delete.js ${SAMPLE_INSTANCE_CONFIG_ID} ${PROJECT_ID}` - ); - assert.match( - output, - new RegExp(`Deleting ${SAMPLE_INSTANCE_CONFIG_ID}...`) - ); - assert.match( - output, - new RegExp(`Deleted instance config ${SAMPLE_INSTANCE_CONFIG_ID}.`) - ); - }); - - // list_instance_config_operations - it('should list all instance config operations', async () => { - const output = execSync( - `node instance-config-get-operations.js ${PROJECT_ID}` - ); - assert.match( - output, - new RegExp( - `Available instance config operations for project ${PROJECT_ID}:` - ) - ); - assert.include(output, 'Instance config operation for'); - assert.include( - output, - 'type.googleapis.com/google.spanner.admin.instance.v1.CreateInstanceConfigMetadata' - ); - }); - - // list_instance_configs - it('should list available instance configs', async () => { - const output = execSync(`node list-instance-configs.js ${PROJECT_ID}`); - assert.match( - output, - new RegExp(`Available instance configs for project ${PROJECT_ID}:`) - ); - assert.include(output, 'Available leader options for instance config'); - }); - - // get_instance_config - // TODO: Enable when the feature has been released. 
- it.skip('should get a specific instance config', async () => { - const output = execSync(`node get-instance-config.js ${PROJECT_ID}`); - assert.include(output, 'Available leader options for instance config'); - }); - - // create_database_with_default_leader - it('should create a database with a default leader', async () => { - const output = execSync( - `node database-create-with-default-leader.js "${SAMPLE_INSTANCE_ID}" "${DEFAULT_LEADER_DATABASE_ID}" "${DEFAULT_LEADER}" ${PROJECT_ID}` - ); - assert.match( - output, - new RegExp( - `Waiting for creation of ${DEFAULT_LEADER_DATABASE_ID} to complete...` - ) - ); - assert.match( - output, - new RegExp( - `Created database ${DEFAULT_LEADER_DATABASE_ID} with default leader ${DEFAULT_LEADER}.` - ) - ); - }); - - // update_database_with_default_leader - it('should update a database with a default leader', async () => { - const output = execSync( - `node database-update-default-leader.js "${SAMPLE_INSTANCE_ID}" "${DEFAULT_LEADER_DATABASE_ID}" "${DEFAULT_LEADER_2}" ${PROJECT_ID}` - ); - assert.match( - output, - new RegExp( - `Waiting for updating of ${DEFAULT_LEADER_DATABASE_ID} to complete...` - ) - ); - assert.match( - output, - new RegExp( - `Updated database ${DEFAULT_LEADER_DATABASE_ID} with default leader ${DEFAULT_LEADER_2}.` - ) - ); + await spanner + .instance(INSTANCE_ID) + .database(SEQUENCE_DATABASE_ID) + .delete(); }); - // get_default_leader - it('should get the default leader option of a database', async () => { + // create_sequence + it('should create a sequence', async () => { const output = execSync( - `node database-get-default-leader.js "${SAMPLE_INSTANCE_ID}" "${DEFAULT_LEADER_DATABASE_ID}" ${PROJECT_ID}` + `node sequence-create.js "${INSTANCE_ID}" "${SEQUENCE_DATABASE_ID}" ${PROJECT_ID}` ); - assert.include( + assert.match( output, - `The default_leader for ${DEFAULT_LEADER_DATABASE_ID} is ${DEFAULT_LEADER_2}` + new RegExp('Created Seq sequence and Customers table') + ); + assert.match( + output, + new RegExp('Number of customer records inserted is: 3') ); }); - // list_databases - it('should list databases on the instance', async () => { + // alter_sequence + it('should alter a sequence', async () => { const output = execSync( - `node list-databases.js "${SAMPLE_INSTANCE_ID}" ${PROJECT_ID}` + `node sequence-alter.js "${INSTANCE_ID}" "${SEQUENCE_DATABASE_ID}" ${PROJECT_ID}` ); assert.match( output, new RegExp( - `Databases for projects/${PROJECT_ID}/instances/${SAMPLE_INSTANCE_ID}:` + 'Altered Seq sequence to skip an inclusive range between 1000 and 5000000.' ) ); - assert.include(output, `(default leader = ${DEFAULT_LEADER_2}`); + assert.match( + output, + new RegExp('Number of customer records inserted is: 3') + ); }); - // get_database_ddl - it('should get the ddl of a database', async () => { + // drop_sequence + it('should drop a sequence', async () => { const output = execSync( - `node database-get-ddl.js "${SAMPLE_INSTANCE_ID}" "${DEFAULT_LEADER_DATABASE_ID}" ${PROJECT_ID}` + `node sequence-drop.js "${INSTANCE_ID}" "${SEQUENCE_DATABASE_ID}" ${PROJECT_ID}` ); assert.match( output, new RegExp( - `Retrieved database DDL for projects/${PROJECT_ID}/instances/${SAMPLE_INSTANCE_ID}/databases/${DEFAULT_LEADER_DATABASE_ID}:` + 'Altered Customers table to drop DEFAULT from CustomerId column and dropped the Seq sequence.' 
) ); - assert.include(output, 'CREATE TABLE Singers'); }); }); - describe('postgreSQL', () => { + describe('leader options', () => { before(async () => { const instance = spanner.instance(SAMPLE_INSTANCE_ID); const [, operation] = await instance.create({ - config: PG_LOCATION_ID, + config: 'nam6', nodes: 1, - displayName: 'PostgreSQL Test', + displayName: 'Multi-region options test', labels: { ['cloud_spanner_samples']: 'true', created: Math.round(Date.now() / 1000).toString(), // current time @@ -1683,327 +1018,171 @@ describe('Spanner', () => { await instance.delete(); }); - // create_pg_database - it('should create an example PostgreSQL database', async () => { + // create_instance_config + it('should create an example custom instance config', async () => { const output = execSync( - `node pg-database-create.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node instance-config-create.js ${SAMPLE_INSTANCE_CONFIG_ID} ${BASE_INSTANCE_CONFIG_ID} ${PROJECT_ID}` ); assert.match( output, - new RegExp(`Waiting for operation on ${PG_DATABASE_ID} to complete...`) + new RegExp( + `Waiting for create operation for ${SAMPLE_INSTANCE_CONFIG_ID} to complete...` + ) ); assert.match( output, - new RegExp( - `Created database ${PG_DATABASE_ID} on instance ${SAMPLE_INSTANCE_ID} with dialect POSTGRESQL.` - ) + new RegExp(`Created instance config ${SAMPLE_INSTANCE_CONFIG_ID}.`) ); }); - // pg_interleaving - it('should create an interleaved table hierarchy using PostgreSQL dialect', async () => { + // update_instance_config + it('should update an example custom instance config', async () => { const output = execSync( - `node pg-interleaving.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` - ); - assert.match( - output, - new RegExp(`Waiting for operation on ${PG_DATABASE_ID} to complete...`) + `node instance-config-update.js ${SAMPLE_INSTANCE_CONFIG_ID} ${PROJECT_ID}` ); assert.match( output, new RegExp( - `Created an interleaved table hierarchy in database ${PG_DATABASE_ID} using PostgreSQL dialect.` + `Waiting for update operation for ${SAMPLE_INSTANCE_CONFIG_ID} to complete...` ) ); - }); - - // pg_dml_with_parameter - it('should execute a DML statement with parameters on a Spanner PostgreSQL database', async () => { - const output = execSync( - `node pg-dml-with-parameter.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` - ); assert.match( output, - new RegExp('Successfully executed 1 postgreSQL statements using DML') + new RegExp(`Updated instance config ${SAMPLE_INSTANCE_CONFIG_ID}.`) ); }); - // pg_dml_batch - it('should execute a batch of DML statements on a Spanner PostgreSQL database', async () => { + // delete_instance_config + it('should delete an example custom instance config', async () => { const output = execSync( - `node pg-dml-batch.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node instance-config-delete.js ${SAMPLE_INSTANCE_CONFIG_ID} ${PROJECT_ID}` ); assert.match( output, - new RegExp( - 'Successfully executed 3 postgreSQL statements using Batch DML.' 
- ) - ); - }); - - // pg_dml_partitioned - it('should execute a partitioned DML on a Spanner PostgreSQL database', async () => { - const output = execSync( - `node pg-dml-partitioned.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, new RegExp('Successfully deleted 1 record.')); - }); - - // pg_query_with_parameters - it('should execute a query with parameters on a Spanner PostgreSQL database.', async () => { - const output = execSync( - `node pg-query-parameter.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + new RegExp(`Deleting ${SAMPLE_INSTANCE_CONFIG_ID}...`) ); assert.match( output, - new RegExp('SingerId: 1, FirstName: Alice, LastName: Henderson') + new RegExp(`Deleted instance config ${SAMPLE_INSTANCE_CONFIG_ID}.`) ); }); - // pg_dml_update - it('should update a table using parameterized queries on a Spanner PostgreSQL database.', async () => { + // list_instance_config_operations + it('should list all instance config operations', async () => { const output = execSync( - `node pg-dml-getting-started-update.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node instance-config-get-operations.js ${PROJECT_ID}` ); assert.match( output, - new RegExp('Successfully updated 1 record in the Singers table.') - ); - }); - - // pg_add_column - it('should add a column to a table in the Spanner PostgreSQL database.', async () => { - const output = execSync( - `node pg-add-column.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + new RegExp( + `Getting list of instance config operations on project ${PROJECT_ID}...\n` + ) ); assert.match( output, new RegExp( - `Added MarketingBudget column to Albums table in database ${PG_DATABASE_ID}` + `Available instance config operations for project ${PROJECT_ID}:` ) ); - }); - - //pg_create_index - it('should create an index in the Spanner PostgreSQL database.', async () => { - const output = execSync( - `node pg-index-create-storing.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, new RegExp('Added the AlbumsByAlbumTitle index.')); - }); - - // pg_schema_information - it('should query the information schema metadata in a Spanner PostgreSQL database', async () => { - const output = execSync( - `node pg-schema-information.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + assert.include(output, 'Instance config operation for'); + assert.include( + output, + 'type.googleapis.com/google.spanner.admin.instance.v1.CreateInstanceConfigMetadata' ); - assert.match(output, new RegExp('Table: public.albums')); - assert.match(output, new RegExp('Table: public.author')); - assert.match(output, new RegExp('Table: public.book')); - assert.match(output, new RegExp('Table: public.singers')); }); - // pg_ordering_nulls - it('should order nulls as per clause in a Spanner PostgreSQL database', async () => { - const output = execSync( - `node pg-ordering-nulls.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, new RegExp('Author ORDER BY FirstName')); - assert.match(output, new RegExp('Author ORDER BY FirstName DESC')); - assert.match(output, new RegExp('Author ORDER BY FirstName NULLS FIRST')); + // list_instance_configs + it('should list available instance configs', async () => { + const output = execSync(`node list-instance-configs.js ${PROJECT_ID}`); assert.match( output, - new RegExp('Author ORDER BY FirstName DESC NULLS LAST') + new RegExp(`Available instance configs for project ${PROJECT_ID}:`) ); + assert.include(output, 
'Available leader options for instance config'); }); - // pg_numeric_data_type - it('should create a table, insert and query pg numeric data', async () => { - const output = execSync( - `node pg-numeric-data-type.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` - ); - assert.match( - output, - new RegExp(`Waiting for operation on ${PG_DATABASE_ID} to complete...`) - ); - assert.match( - output, - new RegExp(`Added table venues to database ${PG_DATABASE_ID}.`) - ); - assert.match(output, new RegExp('Inserted data.')); - assert.match(output, new RegExp('VenueId: 4, Revenue: 97372.3863')); - assert.match(output, new RegExp('VenueId: 19, Revenue: 7629')); - assert.match(output, new RegExp('VenueId: 398, Revenue: 0.000000123')); + // get_instance_config + // TODO: Enable when the feature has been released. + it.skip('should get a specific instance config', async () => { + const output = execSync(`node get-instance-config.js ${PROJECT_ID}`); + assert.include(output, 'Available leader options for instance config'); }); - // pg_jsonb_add_column - it('should add a jsonb column to a table', async () => { + // create_database_with_default_leader + it('should create a database with a default leader', async () => { const output = execSync( - `node pg-jsonb-add-column.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` - ); - assert.match( - output, - new RegExp(`Waiting for operation on ${PG_DATABASE_ID} to complete...`) + `node database-create-with-default-leader.js "${SAMPLE_INSTANCE_ID}" "${DEFAULT_LEADER_DATABASE_ID}" "${DEFAULT_LEADER}" ${PROJECT_ID}` ); assert.match( output, new RegExp( - `Added jsonb column to table venues to database ${PG_DATABASE_ID}.` + `Waiting for creation of ${DEFAULT_LEADER_DATABASE_ID} to complete...` ) ); - }); - - // pg_jsonb_insert_data - it('should insert pg jsonb data', async () => { - const output = execSync( - `node pg-jsonb-update-data.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, new RegExp('Updated data.')); - }); - - // pg_jsonb_query_data - it('should query pg jsonb data', async () => { - const output = execSync( - `node pg-jsonb-query-parameter.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` - ); - assert.match( - output, - new RegExp('VenueId: 19, Details: {"value":{"open":true,"rating":9}}') - ); - }); - - // pg_case_sensitivity - it('should create case sensitive table and query the information in a Spanner PostgreSQL database', async () => { - const output = execSync( - `node pg-case-sensitivity.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` - ); assert.match( output, new RegExp( - `Created table with case sensitive names in database ${PG_DATABASE_ID} using PostgreSQL dialect.` + `Created database ${DEFAULT_LEADER_DATABASE_ID} with default leader ${DEFAULT_LEADER}.` ) ); - assert.match(output, new RegExp('Inserted data using mutations.')); - assert.match(output, new RegExp('Concerts Table Data using Mutations:')); - assert.match(output, new RegExp('Concerts Table Data using Aliases:')); - assert.match(output, new RegExp('Inserted data using DML.')); - }); - - // pg_datatypes_casting - it('should use cast operator to cast from one data type to another in a Spanner PostgreSQL database', async () => { - const output = execSync( - `node pg-datatypes-casting.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, new RegExp('Data types after casting')); - }); - - // pg_functions - it('should call a server side function on a Spanner PostgreSQL database.', 
async () => { - const output = execSync( - `node pg-functions.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` - ); - assert.match(output, new RegExp('1284352323 seconds after epoch is')); - }); - - // pg_dml_returning_insert - it('should insert records using DML Returning in a Spanner PostgreSQL database', async () => { - const output = execSync( - `node pg-dml-returning-insert ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` - ); - assert.match( - output, - new RegExp('Successfully inserted 1 record into the Singers table') - ); - assert.match(output, new RegExp('Virginia Watson')); - }); - - // pg_dml_returning_update - it('should update records using DML Returning in a Spanner PostgreSQL database', async () => { - const output = execSync( - `node pg-dml-returning-update ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` - ); - assert.match( - output, - new RegExp('Successfully updated 1 record into the Singers table') - ); - assert.match(output, new RegExp('Virginia1 Watson1')); }); - // pg_dml_returning_delete - it('should delete records using DML Returning in a Spanner PostgreSQL database', async () => { - const output = execSync( - `node pg-dml-returning-delete ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` - ); - assert.match( - output, - new RegExp('Successfully deleted 1 record from the Singers table') - ); - assert.match(output, new RegExp('Virginia1 Watson1')); - }); - - // pg_create_sequence - it('should create a sequence', async () => { + // update_database_with_default_leader + it('should update a database with a default leader', async () => { const output = execSync( - `node pg-sequence-create.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node database-update-default-leader.js "${SAMPLE_INSTANCE_ID}" "${DEFAULT_LEADER_DATABASE_ID}" "${DEFAULT_LEADER_2}" ${PROJECT_ID}` ); assert.match( output, - new RegExp('Created Seq sequence and Customers table') + new RegExp( + `Waiting for updating of ${DEFAULT_LEADER_DATABASE_ID} to complete...` + ) ); assert.match( output, - new RegExp('Number of customer records inserted is: 3') + new RegExp( + `Updated database ${DEFAULT_LEADER_DATABASE_ID} with default leader ${DEFAULT_LEADER_2}.` + ) ); }); - // pg_alter_sequence - it('should alter a sequence', async () => { + // get_default_leader + it('should get the default leader option of a database', async () => { const output = execSync( - `node pg-sequence-alter.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` - ); - assert.match( - output, - new RegExp( - 'Altered Seq sequence to skip an inclusive range between 1000 and 5000000.' - ) + `node database-get-default-leader.js "${SAMPLE_INSTANCE_ID}" "${DEFAULT_LEADER_DATABASE_ID}" ${PROJECT_ID}` ); - assert.match( + assert.include( output, - new RegExp('Number of customer records inserted is: 3') + `The default_leader for ${DEFAULT_LEADER_DATABASE_ID} is ${DEFAULT_LEADER_2}` ); }); - // pg_drop_sequence - it('should drop a sequence', async () => { + // list_databases + it('should list databases on the instance', async () => { const output = execSync( - `node pg-sequence-drop.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node list-databases.js "${SAMPLE_INSTANCE_ID}" ${PROJECT_ID}` ); assert.match( output, new RegExp( - 'Altered Customers table to drop DEFAULT from CustomerId column and dropped the Seq sequence.' 
+ `Databases for projects/${PROJECT_ID}/instances/${SAMPLE_INSTANCE_ID}:` ) ); + assert.include(output, `(default leader = ${DEFAULT_LEADER_2}`); }); - // directed_read_options - it('should run read-only transaction with directed read options set', async () => { + // get_database_ddl + it('should get the ddl of a database', async () => { const output = execSync( - `node directed-reads.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` - ); - console.log(output); - assert.match( - output, - new RegExp( - 'SingerId: 2, AlbumId: 2, AlbumTitle: Forever Hold your Peace' - ) + `node database-get-ddl.js "${SAMPLE_INSTANCE_ID}" "${DEFAULT_LEADER_DATABASE_ID}" ${PROJECT_ID}` ); assert.match( output, new RegExp( - 'Successfully executed read-only transaction with directedReadOptions' + `Retrieved database DDL for projects/${PROJECT_ID}/instances/${SAMPLE_INSTANCE_ID}/databases/${DEFAULT_LEADER_DATABASE_ID}:` ) ); + assert.include(output, 'CREATE TABLE Singers'); }); }); }); diff --git a/samples/table-alter-with-foreign-key-delete-cascade.js b/samples/table-alter-with-foreign-key-delete-cascade.js index 444e864a3..f3bf4e466 100644 --- a/samples/table-alter-with-foreign-key-delete-cascade.js +++ b/samples/table-alter-with-foreign-key-delete-cascade.js @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -21,9 +21,6 @@ function main(instanceId, databaseId, projectId) { // [START spanner_alter_table_with_foreign_key_delete_cascade] - // Imports the Google Cloud client library - const {Spanner} = require('@google-cloud/spanner'); - /** * TODO(developer): Uncomment the following lines before running the sample. */ @@ -31,23 +28,32 @@ function main(instanceId, databaseId, projectId) { // const instanceId = 'my-instance-id'; // const databaseId = 'my-database-id'; - // Creates a client + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + // creates a client const spanner = new Spanner({ projectId: projectId, }); - // Gets a reference to a Cloud Spanner instance and a database. The database does not need to exist. 
- const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + const databaseAdminClient = spanner.getDatabaseAdminClient(); async function alterTableWithForeignKeyDeleteCascade() { - const [operation] = await database.updateSchema([ + const request = [ `ALTER TABLE ShoppingCarts - ADD CONSTRAINT FKShoppingCartsCustomerName - FOREIGN KEY (CustomerName) - REFERENCES Customers(CustomerName) - ON DELETE CASCADE`, - ]); + ADD CONSTRAINT FKShoppingCartsCustomerName + FOREIGN KEY (CustomerName) + REFERENCES Customers(CustomerName) + ON DELETE CASCADE`, + ]; + const [operation] = await databaseAdminClient.updateDatabaseDdl({ + database: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + statements: request, + }); console.log(`Waiting for operation on ${databaseId} to complete...`); await operation.promise(); diff --git a/samples/table-create-with-foreign-key-delete-cascade.js b/samples/table-create-with-foreign-key-delete-cascade.js index a41faf7f5..b0e9b215e 100644 --- a/samples/table-create-with-foreign-key-delete-cascade.js +++ b/samples/table-create-with-foreign-key-delete-cascade.js @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -21,9 +21,6 @@ function main(instanceId, databaseId, projectId) { // [START spanner_create_table_with_foreign_key_delete_cascade] - // Imports the Google Cloud client library - const {Spanner} = require('@google-cloud/spanner'); - /** * TODO(developer): Uncomment the following lines before running the sample. */ @@ -31,29 +28,38 @@ function main(instanceId, databaseId, projectId) { // const instanceId = 'my-instance-id'; // const databaseId = 'my-database-id'; - // Creates a client + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + // creates a client const spanner = new Spanner({ projectId: projectId, }); - // Gets a reference to a Cloud Spanner instance and a database. The database does not need to exist. 
- const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + const databaseAdminClient = spanner.getDatabaseAdminClient(); async function createTableWithForeignKeyDeleteCascade() { - const [operation] = await database.updateSchema([ + const request = [ `CREATE TABLE Customers ( - CustomerId INT64, - CustomerName STRING(62) NOT NULL - ) PRIMARY KEY (CustomerId)`, + CustomerId INT64, + CustomerName STRING(62) NOT NULL + ) PRIMARY KEY (CustomerId)`, `CREATE TABLE ShoppingCarts ( - CartId INT64 NOT NULL, - CustomerId INT64 NOT NULL, - CustomerName STRING(62) NOT NULL, - CONSTRAINT FKShoppingCartsCustomerId FOREIGN KEY (CustomerId) - REFERENCES Customers (CustomerId) ON DELETE CASCADE, - ) PRIMARY KEY (CartId)`, - ]); + CartId INT64 NOT NULL, + CustomerId INT64 NOT NULL, + CustomerName STRING(62) NOT NULL, + CONSTRAINT FKShoppingCartsCustomerId FOREIGN KEY (CustomerId) + REFERENCES Customers (CustomerId) ON DELETE CASCADE, + ) PRIMARY KEY (CartId)`, + ]; + const [operation] = await databaseAdminClient.updateDatabaseDdl({ + database: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + statements: request, + }); console.log(`Waiting for operation on ${databaseId} to complete...`); await operation.promise(); diff --git a/samples/table-drop-foreign-key-constraint-delete-cascade.js b/samples/table-drop-foreign-key-constraint-delete-cascade.js index def4292c2..ac9fc5699 100644 --- a/samples/table-drop-foreign-key-constraint-delete-cascade.js +++ b/samples/table-drop-foreign-key-constraint-delete-cascade.js @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -21,9 +21,6 @@ function main(instanceId, databaseId, projectId) { // [START spanner_drop_foreign_key_constraint_delete_cascade] - // Imports the Google Cloud client library - const {Spanner} = require('@google-cloud/spanner'); - /** * TODO(developer): Uncomment the following lines before running the sample. */ @@ -31,20 +28,30 @@ function main(instanceId, databaseId, projectId) { // const instanceId = 'my-instance-id'; // const databaseId = 'my-database-id'; - // Creates a client + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + // creates a client const spanner = new Spanner({ projectId: projectId, }); - // Gets a reference to a Cloud Spanner instance and a database. The database does not need to exist. 
- const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + const databaseAdminClient = spanner.getDatabaseAdminClient(); async function dropForeignKeyConstraintDeleteCascade() { - const [operation] = await database.updateSchema([ + const request = [ `ALTER TABLE ShoppingCarts - DROP CONSTRAINT FKShoppingCartsCustomerName`, - ]); + DROP CONSTRAINT FKShoppingCartsCustomerName`, + ]; + + const [operation] = await databaseAdminClient.updateDatabaseDdl({ + database: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + statements: request, + }); console.log(`Waiting for operation on ${databaseId} to complete...`); await operation.promise(); diff --git a/samples/timestamp.js b/samples/timestamp.js index 63473a066..0f83f0e98 100644 --- a/samples/timestamp.js +++ b/samples/timestamp.js @@ -31,9 +31,8 @@ async function createTableWithTimestamp(instanceId, databaseId, projectId) { projectId: projectId, }); - // Gets a reference to a Cloud Spanner instance - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + // Gets a reference to a Cloud Spanner Database Admin Client object + const databaseAdminClient = spanner.getDatabaseAdminClient(); // Note: Cloud Spanner interprets Node.js numbers as FLOAT64s, so they // must be converted to strings before being inserted as INT64s @@ -49,7 +48,14 @@ async function createTableWithTimestamp(instanceId, databaseId, projectId) { ]; // Creates a table in an existing database - const [operation] = await database.updateSchema(request); + const [operation] = await databaseAdminClient.updateDatabaseDdl({ + database: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + statements: request, + }); console.log(`Waiting for operation on ${databaseId} to complete...`); @@ -185,9 +191,8 @@ async function addTimestampColumn(instanceId, databaseId, projectId) { projectId: projectId, }); - // Gets a reference to a Cloud Spanner instance and database - const instance = spanner.instance(instanceId); - const database = instance.database(databaseId); + // Gets a reference to a Cloud Spanner Database Admin Client object + const databaseAdminClient = spanner.getDatabaseAdminClient(); const request = [ `ALTER TABLE Albums ADD COLUMN LastUpdateTime TIMESTAMP OPTIONS @@ -196,7 +201,14 @@ async function addTimestampColumn(instanceId, databaseId, projectId) { // Adds a new commit timestamp column to the Albums table try { - const [operation] = await database.updateSchema(request); + const [operation] = await databaseAdminClient.updateDatabaseDdl({ + database: databaseAdminClient.databasePath( + projectId, + instanceId, + databaseId + ), + statements: request, + }); console.log('Waiting for operation to complete...'); @@ -208,8 +220,9 @@ async function addTimestampColumn(instanceId, databaseId, projectId) { } catch (err) { console.error('ERROR:', err); } finally { - // Close the database when finished. - database.close(); + // Close the spanner client when finished. + // The databaseAdminClient does not require explicit closure. The closure of the Spanner client will automatically close the databaseAdminClient. 
+ spanner.close(); } // [END spanner_add_timestamp_column] } diff --git a/src/codec.ts b/src/codec.ts index 663a76873..ac018b310 100644 --- a/src/codec.ts +++ b/src/codec.ts @@ -629,6 +629,10 @@ function getType(value: Value): Type { return {type: 'bool'}; } + if (is.string(value)) { + return {type: 'string'}; + } + if (Buffer.isBuffer(value)) { return {type: 'bytes'}; } @@ -671,7 +675,6 @@ function getType(value: Value): Type { return {type: 'json'}; } - // String type is also returned as unspecified to allow untyped parameters return {type: 'unspecified'}; } diff --git a/src/index.ts b/src/index.ts index 2f2b1db3e..eaa44f408 100644 --- a/src/index.ts +++ b/src/index.ts @@ -46,7 +46,13 @@ import { CreateInstanceConfigCallback, CreateInstanceConfigResponse, } from './instance-config'; -import {grpc, GrpcClientOptions, CallOptions, GoogleError} from 'google-gax'; +import { + grpc, + GrpcClientOptions, + CallOptions, + GoogleError, + ClientOptions, +} from 'google-gax'; import {google, google as instanceAdmin} from '../protos/protos'; import { PagedOptions, @@ -347,6 +353,54 @@ class Spanner extends GrpcService { this.directedReadOptions = directedReadOptions; } + /** + * Gets the InstanceAdminClient object. + * The returned InstanceAdminClient object is a shared, managed instance and should not be manually closed. + * @returns {v1.InstanceAdminClient} The InstanceAdminClient object + * @example + * ``` + * const {Spanner} = require('@google-cloud/spanner'); + * const spanner = new Spanner({ + * projectId: projectId, + * }); + * const instanceAdminClient = spanner.getInstanceAdminClient(); + * ``` + */ + getInstanceAdminClient(): v1.InstanceAdminClient { + const clientName = 'InstanceAdminClient'; + if (!this.clients_.has(clientName)) { + this.clients_.set( + clientName, + new v1[clientName](this.options as ClientOptions) + ); + } + return this.clients_.get(clientName)! as v1.InstanceAdminClient; + } + + /** + * Gets the DatabaseAdminClient object. + * The returned DatabaseAdminClient object is a managed, shared instance and should not be manually closed. + * @returns {v1.DatabaseAdminClient} The DatabaseAdminClient object. + * @example + * ``` + * const {Spanner} = require('@google-cloud/spanner'); + * const spanner = new Spanner({ + * projectId: projectId, + * }); + * const databaseAdminClient = spanner.getDatabaseAdminClient(); + * ``` + */ + getDatabaseAdminClient(): v1.DatabaseAdminClient { + const clientName = 'DatabaseAdminClient'; + if (!this.clients_.has(clientName)) { + this.clients_.set( + clientName, + new v1[clientName](this.options as ClientOptions) + ); + } + return this.clients_.get(clientName)! as v1.DatabaseAdminClient; + } + /** Closes this Spanner client and cleans up all resources used by it. 
*/ close(): void { this.clients_.forEach(c => { @@ -1741,6 +1795,8 @@ promisifyAll(Spanner, { 'pgJsonb', 'operation', 'timestamp', + 'getInstanceAdminClient', + 'getDatabaseAdminClient', ], }); diff --git a/src/transaction.ts b/src/transaction.ts index 4eb564459..11eb03657 100644 --- a/src/transaction.ts +++ b/src/transaction.ts @@ -1300,10 +1300,7 @@ export class Snapshot extends EventEmitter { if (!is.empty(typeMap)) { Object.keys(typeMap).forEach(param => { const type = typeMap[param]; - const typeObject = codec.createTypeObject(type); - if (typeObject.code !== 'TYPE_CODE_UNSPECIFIED') { - paramTypes[param] = codec.createTypeObject(type); - } + paramTypes[param] = codec.createTypeObject(type); }); } diff --git a/system-test/spanner.ts b/system-test/spanner.ts index 011cba1f5..6690b5a18 100644 --- a/system-test/spanner.ts +++ b/system-test/spanner.ts @@ -231,6 +231,115 @@ describe('Spanner', () => { ); }); + describe('Autogenerated Admin Client', async () => { + const projectId = process.env.GCLOUD_PROJECT; + const instanceId = envInstanceName + ? envInstanceName + : generateName('instance'); + const DATABASE = generateName('database'); + const instanceAdminClient = spanner.getInstanceAdminClient(); + const databaseAdminClient = spanner.getDatabaseAdminClient(); + + before(async () => { + assert(projectId); + if (generateInstanceForTest) { + const [operation] = await instanceAdminClient.createInstance({ + parent: instanceAdminClient.projectPath(projectId), + instanceId: instanceId, + instance: { + config: instanceAdminClient.instanceConfigPath( + projectId, + 'regional-us-central1' + ), + nodeCount: 1, + displayName: instanceId, + labels: { + cloud_spanner_samples: 'true', + created: Math.round(Date.now() / 1000).toString(), // current time + }, + }, + }); + const [instance] = await operation.promise(); + RESOURCES_TO_CLEAN.push(instance as Instance); + } else { + console.log( + `Not creating temp instance, using + ${instanceAdminClient.instancePath( + projectId, + envInstanceName + )}...` + ); + } + const [operation] = await databaseAdminClient.createDatabase({ + createStatement: 'CREATE DATABASE `' + DATABASE + '`', + extraStatements: [ + `CREATE TABLE ${TABLE_NAME} ( + SingerId STRING(1024) NOT NULL, + Name STRING(1024), + ) PRIMARY KEY(SingerId)`, + ], + parent: databaseAdminClient.instancePath(projectId, instanceId), + }); + await operation.promise(); + }); + + describe('Instances', () => { + it('should have created the instance', async () => { + assert(projectId); + try { + const [metadata] = await instanceAdminClient.getInstance({ + name: instanceAdminClient.instancePath(projectId, instanceId), + }); + assert.strictEqual( + metadata!.name, + instanceAdminClient.instancePath(projectId, instanceId) + ); + } catch (err) { + if (!err) { + assert.ifError(err); + } + } + }); + + it('should list the instances', async () => { + assert(projectId); + const [instances] = await instanceAdminClient.listInstances({ + parent: instanceAdminClient.projectPath(projectId), + }); + assert(instances!.length > 0); + }); + }); + + describe('Databases', () => { + async function createDatabase(database, dialect) { + assert(projectId); + const [metadata] = await databaseAdminClient.getDatabase({ + name: databaseAdminClient.databasePath( + projectId, + instanceId, + database + ), + }); + assert.strictEqual( + metadata!.name, + databaseAdminClient.databasePath(projectId, instanceId, database) + ); + assert.strictEqual(metadata!.state, 'READY'); + if (IS_EMULATOR_ENABLED) { + assert.strictEqual( + 
metadata!.databaseDialect, + 'DATABASE_DIALECT_UNSPECIFIED' + ); + } else { + assert.strictEqual(metadata!.databaseDialect, dialect); + } + } + + it('GOOGLE_STANDARD_SQL should have created the database', async () => { + createDatabase(DATABASE, 'GOOGLE_STANDARD_SQL'); + }); + }); + }); + describe('types', () => { const TABLE_NAME = 'TypeCheck'; const googleSqlTable = DATABASE.table(TABLE_NAME); @@ -275,47 +384,6 @@ describe('Spanner', () => { }); }); } - function readUntypedData(column, value, dialect, callback) { - const id = generateName('id'); - const insertData = { - Key: id, - [column]: value, - }; - - let table = googleSqlTable; - let query: ExecuteSqlRequest = { - sql: 'SELECT * FROM `' + table.name + '` WHERE ' + column + ' = @value', - params: { - value, - }, - }; - let database = DATABASE; - if (dialect === Spanner.POSTGRESQL) { - table = postgreSqlTable; - query = { - sql: 'SELECT * FROM ' + table.name + ' WHERE "' + column + '" = $1', - params: { - p1: value, - }, - }; - database = PG_DATABASE; - } - table.insert(insertData, (err, insertResp) => { - if (err) { - callback(err); - return; - } - - database.run(query, (err, rows, readResp) => { - if (err) { - callback(err); - return; - } - - callback(null, rows.shift(), insertResp, readResp); - }); - }); - } before(async () => { if (IS_EMULATOR_ENABLED) { @@ -798,33 +866,6 @@ describe('Spanner', () => { done(); }); }); - - it('GOOGLE_STANDARD_SQL should read untyped int64 values', function (done) { - if (IS_EMULATOR_ENABLED) { - this.skip(); - } - readUntypedData( - 'IntValue', - '5', - Spanner.GOOGLE_STANDARD_SQL, - (err, row) => { - assert.ifError(err); - assert.deepStrictEqual(row.toJSON().IntValue, 5); - done(); - } - ); - }); - - it('POSTGRESQL should read untyped int64 values', function (done) { - if (IS_EMULATOR_ENABLED) { - this.skip(); - } - readUntypedData('IntValue', '5', Spanner.POSTGRESQL, (err, row) => { - assert.ifError(err); - assert.deepStrictEqual(row.toJSON().IntValue, 5); - done(); - }); - }); }); describe('oids', () => { @@ -1005,33 +1046,6 @@ describe('Spanner', () => { done(); }); }); - - it('GOOGLE_STANDARD_SQL should read untyped float64 values', function (done) { - if (IS_EMULATOR_ENABLED) { - this.skip(); - } - readUntypedData( - 'FloatValue', - 5.6, - Spanner.GOOGLE_STANDARD_SQL, - (err, row) => { - assert.ifError(err); - assert.deepStrictEqual(row.toJSON().FloatValue, 5.6); - done(); - } - ); - }); - - it('POSTGRESQL should read untyped float64 values', function (done) { - if (IS_EMULATOR_ENABLED) { - this.skip(); - } - readUntypedData('FloatValue', 5.6, Spanner.POSTGRESQL, (err, row) => { - assert.ifError(err); - assert.deepStrictEqual(row.toJSON().FloatValue, 5.6); - done(); - }); - }); }); describe('numerics', () => { @@ -1182,44 +1196,6 @@ describe('Spanner', () => { done(); }); }); - - it('GOOGLE_STANDARD_SQL should read untyped numeric values', function (done) { - if (IS_EMULATOR_ENABLED) { - this.skip(); - } - readUntypedData( - 'NumericValue', - '5.623', - Spanner.GOOGLE_STANDARD_SQL, - (err, row) => { - assert.ifError(err); - assert.deepStrictEqual( - row.toJSON().NumericValue.value, - Spanner.numeric('5.623').value - ); - done(); - } - ); - }); - - it('POSTGRESQL should read untyped numeric values', function (done) { - if (IS_EMULATOR_ENABLED) { - this.skip(); - } - readUntypedData( - 'NumericValue', - '5.623', - Spanner.POSTGRESQL, - (err, row) => { - assert.ifError(err); - assert.deepStrictEqual( - row.toJSON().NumericValue, - Spanner.pgNumeric(5.623) - ); - done(); - } - ); - }); }); 
describe('strings', () => { @@ -1321,38 +1297,6 @@ describe('Spanner', () => { } ); }); - - it('GOOGLE_STANDARD_SQL should read untyped string values', function (done) { - if (IS_EMULATOR_ENABLED) { - this.skip(); - } - readUntypedData( - 'StringValue', - 'hello', - Spanner.GOOGLE_STANDARD_SQL, - (err, row) => { - assert.ifError(err); - assert.deepStrictEqual(row.toJSON().StringValue, 'hello'); - done(); - } - ); - }); - - it('POSTGRESQL should read untyped string values', function (done) { - if (IS_EMULATOR_ENABLED) { - this.skip(); - } - readUntypedData( - 'StringValue', - 'hello', - Spanner.POSTGRESQL, - (err, row) => { - assert.ifError(err); - assert.deepStrictEqual(row.toJSON().StringValue, 'hello'); - done(); - } - ); - }); }); describe('bytes', () => { @@ -1454,38 +1398,6 @@ describe('Spanner', () => { done(); }); }); - - it('GOOGLE_STANDARD_SQL should read untyped bytes values', function (done) { - if (IS_EMULATOR_ENABLED) { - this.skip(); - } - readUntypedData( - 'BytesValue', - Buffer.from('b'), - Spanner.GOOGLE_STANDARD_SQL, - (err, row) => { - assert.ifError(err); - assert.deepStrictEqual(row.toJSON().BytesValue, Buffer.from('b')); - done(); - } - ); - }); - - it('POSTGRESQL should read untyped bytes values', function (done) { - if (IS_EMULATOR_ENABLED) { - this.skip(); - } - readUntypedData( - 'BytesValue', - Buffer.from('b'), - Spanner.POSTGRESQL, - (err, row) => { - assert.ifError(err); - assert.deepStrictEqual(row.toJSON().BytesValue, Buffer.from('b')); - done(); - } - ); - }); }); describe('jsons', () => { @@ -1664,46 +1576,6 @@ describe('Spanner', () => { done(); }); }); - - it('GOOGLE_STANDARD_SQL should read untyped timestamp values', function (done) { - if (IS_EMULATOR_ENABLED) { - this.skip(); - } - readUntypedData( - 'TimestampValue', - '2014-09-27T12:30:00.45Z', - Spanner.GOOGLE_STANDARD_SQL, - (err, row) => { - assert.ifError(err); - const time = row.toJSON().TimestampValue.getTime(); - assert.strictEqual( - time, - Spanner.timestamp('2014-09-27T12:30:00.45Z').getTime() - ); - done(); - } - ); - }); - - it('POSTGRESQL should read untyped timestamp values', function (done) { - if (IS_EMULATOR_ENABLED) { - this.skip(); - } - readUntypedData( - 'TimestampValue', - '2014-09-27T12:30:00.45Z', - Spanner.POSTGRESQL, - (err, row) => { - assert.ifError(err); - const time = row.toJSON().TimestampValue.getTime(); - assert.strictEqual( - time, - Spanner.timestamp('2014-09-27T12:30:00.45Z').getTime() - ); - done(); - } - ); - }); }); describe('dates', () => { @@ -1810,44 +1682,6 @@ describe('Spanner', () => { done(); }); }); - - it('GOOGLE_STANDARD_SQL should read untyped date values', function (done) { - if (IS_EMULATOR_ENABLED) { - this.skip(); - } - readUntypedData( - 'DateValue', - '2014-09-27', - Spanner.GOOGLE_STANDARD_SQL, - (err, row) => { - assert.ifError(err); - assert.deepStrictEqual( - Spanner.date(row.toJSON().DateValue), - Spanner.date('2014-09-27') - ); - done(); - } - ); - }); - - it('POSTGRESQL should read untyped date values', function (done) { - if (IS_EMULATOR_ENABLED) { - this.skip(); - } - readUntypedData( - 'DateValue', - '2014-09-27', - Spanner.POSTGRESQL, - (err, row) => { - assert.ifError(err); - assert.deepStrictEqual( - Spanner.date(row.toJSON().DateValue), - Spanner.date('2014-09-27') - ); - done(); - } - ); - }); }); describe('jsonb', () => { @@ -5273,9 +5107,6 @@ describe('Spanner', () => { params: { v: 'abc', }, - types: { - v: 'string', - }, }; stringQuery(done, DATABASE, query, 'abc'); }); @@ -5330,12 +5161,6 @@ describe('Spanner', () => { 
params: { v: values, }, - types: { - v: { - type: 'array', - child: 'string', - }, - }, }; DATABASE.run(query, (err, rows) => { @@ -5788,21 +5613,6 @@ describe('Spanner', () => { }), p4: Spanner.int(10), }, - types: { - structParam: { - type: 'struct', - fields: [ - { - name: 'userf', - type: 'string', - }, - { - name: 'threadf', - type: 'int64', - }, - ], - }, - }, }; DATABASE.run(query, (err, rows) => { @@ -5858,23 +5668,6 @@ describe('Spanner', () => { }), }), }, - types: { - structParam: { - type: 'struct', - fields: [ - { - name: 'structf', - type: 'struct', - fields: [ - { - name: 'nestedf', - type: 'string', - }, - ], - }, - ], - }, - }, }; DATABASE.run(query, (err, rows) => { @@ -6046,21 +5839,6 @@ describe('Spanner', () => { userf: 'bob', }), }, - types: { - structParam: { - type: 'struct', - fields: [ - { - name: 'threadf', - type: 'int64', - }, - { - name: 'userf', - type: 'string', - }, - ], - }, - }, }; DATABASE.run(query, (err, rows) => { @@ -6082,21 +5860,6 @@ describe('Spanner', () => { threadf: Spanner.int(1), }), }, - types: { - structParam: { - type: 'struct', - fields: [ - { - name: 'userf', - type: 'string', - }, - { - name: 'threadf', - type: 'int64', - }, - ], - }, - }, }; DATABASE.run(query, (err, rows) => { diff --git a/test/codec.ts b/test/codec.ts index 64085fce3..b604e4354 100644 --- a/test/codec.ts +++ b/test/codec.ts @@ -969,7 +969,7 @@ describe('codec', () => { }); it('should determine if the value is a string', () => { - assert.deepStrictEqual(codec.getType('abc'), {type: 'unspecified'}); + assert.deepStrictEqual(codec.getType('abc'), {type: 'string'}); }); it('should determine if the value is bytes', () => { @@ -1006,7 +1006,7 @@ describe('codec', () => { assert.deepStrictEqual(type, { type: 'struct', - fields: [{name: 'a', type: 'unspecified'}], + fields: [{name: 'a', type: 'string'}], }); }); diff --git a/test/index.ts b/test/index.ts index 70f68857e..cd60bbbcd 100644 --- a/test/index.ts +++ b/test/index.ts @@ -91,6 +91,8 @@ const fakePfy = extend({}, pfy, { 'pgJsonb', 'operation', 'timestamp', + 'getInstanceAdminClient', + 'getDatabaseAdminClient', ]); }, }); diff --git a/test/spanner.ts b/test/spanner.ts index 32a1b8e58..38f1105d3 100644 --- a/test/spanner.ts +++ b/test/spanner.ts @@ -960,6 +960,7 @@ describe('Spanner with mock server', () => { assert.strictEqual(request.paramTypes!['int64'].code, 'INT64'); assert.strictEqual(request.paramTypes!['float64'].code, 'FLOAT64'); assert.strictEqual(request.paramTypes!['numeric'].code, 'NUMERIC'); + assert.strictEqual(request.paramTypes!['string'].code, 'STRING'); assert.strictEqual(request.paramTypes!['bytes'].code, 'BYTES'); assert.strictEqual(request.paramTypes!['json'].code, 'JSON'); assert.strictEqual(request.paramTypes!['date'].code, 'DATE'); diff --git a/test/transaction.ts b/test/transaction.ts index 8a40f4230..11a8647e0 100644 --- a/test/transaction.ts +++ b/test/transaction.ts @@ -1120,11 +1120,10 @@ describe('Transaction', () => { }); it('should guess missing param types', () => { - const fakeParams = {a: true, b: 3}; + const fakeParams = {a: 'foo', b: 3}; const fakeTypes = {b: 'number'}; - const fakeMissingType = {type: 'boolean'}; - const expectedMissingType = {code: google.spanner.v1.TypeCode.BOOL}; - const expectedKnownType = {code: google.spanner.v1.TypeCode.INT64}; + const fakeMissingType = {type: 'string'}; + const expectedType = {code: google.spanner.v1.TypeCode.STRING}; sandbox .stub(codec, 'getType') @@ -1133,17 +1132,15 @@ describe('Transaction', () => { sandbox .stub(codec, 
'createTypeObject') - .withArgs('number') - .returns(expectedKnownType as google.spanner.v1.Type) .withArgs(fakeMissingType) - .returns(expectedMissingType as google.spanner.v1.Type); + .returns(expectedType as google.spanner.v1.Type); const {paramTypes} = Snapshot.encodeParams({ params: fakeParams, types: fakeTypes, }); - assert.strictEqual(paramTypes.a, expectedMissingType); + assert.strictEqual(paramTypes.a, expectedType); }); }); }); @@ -1270,17 +1267,17 @@ describe('Transaction', () => { const OBJ_STATEMENTS = [ { - sql: 'INSERT INTO TxnTable (Key, BoolValue) VALUES(@key, @bool)', + sql: 'INSERT INTO TxnTable (Key, StringValue) VALUES(@key, @str)', params: { - key: 999, - bool: true, + key: 'k999', + str: 'abc', }, }, { - sql: 'UPDATE TxnTable t SET t.BoolValue = @bool WHERE t.Key = @key', + sql: 'UPDATE TxnTable t SET t.StringValue = @str WHERE t.Key = @key', params: { - key: 999, - bool: false, + key: 'k999', + str: 'abcd', }, }, ]; @@ -1290,26 +1287,26 @@ describe('Transaction', () => { sql: OBJ_STATEMENTS[0].sql, params: { fields: { - key: {stringValue: OBJ_STATEMENTS[0].params.key.toString()}, - bool: {boolValue: OBJ_STATEMENTS[0].params.bool}, + key: {stringValue: OBJ_STATEMENTS[0].params.key}, + str: {stringValue: OBJ_STATEMENTS[0].params.str}, }, }, paramTypes: { - key: {code: 'INT64'}, - bool: {code: 'BOOL'}, + key: {code: 'STRING'}, + str: {code: 'STRING'}, }, }, { sql: OBJ_STATEMENTS[1].sql, params: { fields: { - key: {stringValue: OBJ_STATEMENTS[1].params.key.toString()}, - bool: {boolValue: OBJ_STATEMENTS[1].params.bool}, + key: {stringValue: OBJ_STATEMENTS[1].params.key}, + str: {stringValue: OBJ_STATEMENTS[1].params.str}, }, }, paramTypes: { - key: {code: 'INT64'}, - bool: {code: 'BOOL'}, + key: {code: 'STRING'}, + str: {code: 'STRING'}, }, }, ];
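
For reference, the sample and test changes above all converge on the same autogenerated admin-client flow: obtain a shared `DatabaseAdminClient` from the `Spanner` handle, build the fully qualified database path, and submit DDL through `updateDatabaseDdl`. A minimal sketch of that flow follows; the project, instance, and database IDs and the DDL statement are placeholders rather than values taken from this repository, and error handling is omitted.

```javascript
// Minimal sketch of the getDatabaseAdminClient()/updateDatabaseDdl() pattern
// used by the migrated samples. IDs and DDL below are placeholders.
const {Spanner} = require('@google-cloud/spanner');

const projectId = 'my-project-id';
const instanceId = 'my-instance';
const databaseId = 'my-database';

const spanner = new Spanner({projectId});

// Shared, managed client; it is closed automatically with the Spanner client.
const databaseAdminClient = spanner.getDatabaseAdminClient();

async function addMarketingBudgetColumn() {
  // Submit a DDL statement through the autogenerated admin API.
  const [operation] = await databaseAdminClient.updateDatabaseDdl({
    database: databaseAdminClient.databasePath(
      projectId,
      instanceId,
      databaseId
    ),
    statements: ['ALTER TABLE Albums ADD COLUMN MarketingBudget INT64'],
  });

  console.log(`Waiting for operation on ${databaseId} to complete...`);
  await operation.promise();
  console.log(`Added MarketingBudget column to Albums in ${databaseId}.`);

  // Only the Spanner client needs explicit closure; the admin client it
  // hands out is managed and closed along with it.
  spanner.close();
}

addMarketingBudgetColumn();
```

Note that only `spanner.close()` is called at the end: the `DatabaseAdminClient` (and likewise the `InstanceAdminClient`) returned by the getter is a shared, managed instance and should not be closed manually.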