diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 2aefd0e91..81f87c569 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:97b671488ad548ef783a452a9e1276ac10f144d5ae56d98cc4bf77ba504082b4 -# created: 2024-02-06T03:20:16.660474034Z + digest: sha256:5a4c19d17e597b92d786e569be101e636c9c2817731f80a5adec56b2aa8fe070 +# created: 2024-04-12T11:35:58.922854369Z diff --git a/.github/auto-label.yaml b/.github/auto-label.yaml index b2016d119..8b37ee897 100644 --- a/.github/auto-label.yaml +++ b/.github/auto-label.yaml @@ -13,3 +13,8 @@ # limitations under the License. requestsize: enabled: true + +path: + pullrequest: true + paths: + samples: "samples" diff --git a/.github/blunderbuss.yml b/.github/blunderbuss.yml new file mode 100644 index 000000000..1e27e789a --- /dev/null +++ b/.github/blunderbuss.yml @@ -0,0 +1,20 @@ +# Blunderbuss config +# +# This file controls who is assigned for pull requests and issues. +# Note: This file is autogenerated. To make changes to the assignee +# team, please update `codeowner_team` in `.repo-metadata.json`. +assign_issues: + - googleapis/api-bigtable + - googleapis/api-bigtable-partners + +assign_issues_by: + - labels: + - "samples" + to: + - googleapis/python-samples-reviewers + - googleapis/api-bigtable + - googleapis/api-bigtable-partners + +assign_prs: + - googleapis/api-bigtable + - googleapis/api-bigtable-partners diff --git a/.kokoro/build.sh b/.kokoro/build.sh index dec6b66a7..b2212fce8 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -33,13 +33,6 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json # Setup project id. export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") -# Remove old nox -python3 -m pip uninstall --yes --quiet nox-automation - -# Install nox -python3 -m pip install --upgrade --quiet nox -python3 -m nox --version - # If this is a continuous build, send the test log to the FlakyBot. # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. 
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile index 8e39a2cc4..bdaf39fe2 100644 --- a/.kokoro/docker/docs/Dockerfile +++ b/.kokoro/docker/docs/Dockerfile @@ -80,4 +80,8 @@ RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ # Test pip RUN python3 -m pip +# Install build requirements +COPY requirements.txt /requirements.txt +RUN python3 -m pip install --require-hashes -r requirements.txt + CMD ["python3.8"] diff --git a/.kokoro/docker/docs/requirements.in b/.kokoro/docker/docs/requirements.in new file mode 100644 index 000000000..816817c67 --- /dev/null +++ b/.kokoro/docker/docs/requirements.in @@ -0,0 +1 @@ +nox diff --git a/.kokoro/docker/docs/requirements.txt b/.kokoro/docker/docs/requirements.txt new file mode 100644 index 000000000..0e5d70f20 --- /dev/null +++ b/.kokoro/docker/docs/requirements.txt @@ -0,0 +1,38 @@ +# +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: +# +# pip-compile --allow-unsafe --generate-hashes requirements.in +# +argcomplete==3.2.3 \ + --hash=sha256:bf7900329262e481be5a15f56f19736b376df6f82ed27576fa893652c5de6c23 \ + --hash=sha256:c12355e0494c76a2a7b73e3a59b09024ca0ba1e279fb9ed6c1b82d5b74b6a70c + # via nox +colorlog==6.8.2 \ + --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ + --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 + # via nox +distlib==0.3.8 \ + --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ + --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 + # via virtualenv +filelock==3.13.1 \ + --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ + --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c + # via virtualenv +nox==2024.3.2 \ + --hash=sha256:e53514173ac0b98dd47585096a55572fe504fecede58ced708979184d05440be \ + --hash=sha256:f521ae08a15adbf5e11f16cb34e8d0e6ea521e0b92868f684e91677deb974553 + # via -r requirements.in +packaging==24.0 \ + --hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 \ + --hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9 + # via nox +platformdirs==4.2.0 \ + --hash=sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068 \ + --hash=sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768 + # via virtualenv +virtualenv==20.25.1 \ + --hash=sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a \ + --hash=sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197 + # via nox diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in index ec867d9fd..fff4d9ce0 100644 --- a/.kokoro/requirements.in +++ b/.kokoro/requirements.in @@ -1,5 +1,5 @@ gcp-docuploader -gcp-releasetool>=1.10.5 # required for compatibility with cryptography>=39.x +gcp-releasetool>=2 # required for compatibility with cryptography>=42.x importlib-metadata typing-extensions twine @@ -8,3 +8,4 @@ setuptools nox>=2022.11.21 # required to remove dependency on py charset-normalizer<3 click<8.1.0 +cryptography>=42.0.5 diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 8c11c9f3e..51f92b8e1 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -93,40 +93,41 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==42.0.0 \ - 
--hash=sha256:0a68bfcf57a6887818307600c3c0ebc3f62fbb6ccad2240aa21887cda1f8df1b \ - --hash=sha256:146e971e92a6dd042214b537a726c9750496128453146ab0ee8971a0299dc9bd \ - --hash=sha256:14e4b909373bc5bf1095311fa0f7fcabf2d1a160ca13f1e9e467be1ac4cbdf94 \ - --hash=sha256:206aaf42e031b93f86ad60f9f5d9da1b09164f25488238ac1dc488334eb5e221 \ - --hash=sha256:3005166a39b70c8b94455fdbe78d87a444da31ff70de3331cdec2c568cf25b7e \ - --hash=sha256:324721d93b998cb7367f1e6897370644751e5580ff9b370c0a50dc60a2003513 \ - --hash=sha256:33588310b5c886dfb87dba5f013b8d27df7ffd31dc753775342a1e5ab139e59d \ - --hash=sha256:35cf6ed4c38f054478a9df14f03c1169bb14bd98f0b1705751079b25e1cb58bc \ - --hash=sha256:3ca482ea80626048975360c8e62be3ceb0f11803180b73163acd24bf014133a0 \ - --hash=sha256:56ce0c106d5c3fec1038c3cca3d55ac320a5be1b44bf15116732d0bc716979a2 \ - --hash=sha256:5a217bca51f3b91971400890905a9323ad805838ca3fa1e202a01844f485ee87 \ - --hash=sha256:678cfa0d1e72ef41d48993a7be75a76b0725d29b820ff3cfd606a5b2b33fda01 \ - --hash=sha256:69fd009a325cad6fbfd5b04c711a4da563c6c4854fc4c9544bff3088387c77c0 \ - --hash=sha256:6cf9b76d6e93c62114bd19485e5cb003115c134cf9ce91f8ac924c44f8c8c3f4 \ - --hash=sha256:74f18a4c8ca04134d2052a140322002fef535c99cdbc2a6afc18a8024d5c9d5b \ - --hash=sha256:85f759ed59ffd1d0baad296e72780aa62ff8a71f94dc1ab340386a1207d0ea81 \ - --hash=sha256:87086eae86a700307b544625e3ba11cc600c3c0ef8ab97b0fda0705d6db3d4e3 \ - --hash=sha256:8814722cffcfd1fbd91edd9f3451b88a8f26a5fd41b28c1c9193949d1c689dc4 \ - --hash=sha256:8fedec73d590fd30c4e3f0d0f4bc961aeca8390c72f3eaa1a0874d180e868ddf \ - --hash=sha256:9515ea7f596c8092fdc9902627e51b23a75daa2c7815ed5aa8cf4f07469212ec \ - --hash=sha256:988b738f56c665366b1e4bfd9045c3efae89ee366ca3839cd5af53eaa1401bce \ - --hash=sha256:a2a8d873667e4fd2f34aedab02ba500b824692c6542e017075a2efc38f60a4c0 \ - --hash=sha256:bd7cf7a8d9f34cc67220f1195884151426ce616fdc8285df9054bfa10135925f \ - --hash=sha256:bdce70e562c69bb089523e75ef1d9625b7417c6297a76ac27b1b8b1eb51b7d0f \ - --hash=sha256:be14b31eb3a293fc6e6aa2807c8a3224c71426f7c4e3639ccf1a2f3ffd6df8c3 \ - --hash=sha256:be41b0c7366e5549265adf2145135dca107718fa44b6e418dc7499cfff6b4689 \ - --hash=sha256:c310767268d88803b653fffe6d6f2f17bb9d49ffceb8d70aed50ad45ea49ab08 \ - --hash=sha256:c58115384bdcfe9c7f644c72f10f6f42bed7cf59f7b52fe1bf7ae0a622b3a139 \ - --hash=sha256:c640b0ef54138fde761ec99a6c7dc4ce05e80420262c20fa239e694ca371d434 \ - --hash=sha256:ca20550bb590db16223eb9ccc5852335b48b8f597e2f6f0878bbfd9e7314eb17 \ - --hash=sha256:d97aae66b7de41cdf5b12087b5509e4e9805ed6f562406dfcf60e8481a9a28f8 \ - --hash=sha256:e9326ca78111e4c645f7e49cbce4ed2f3f85e17b61a563328c85a5208cf34440 +cryptography==42.0.5 \ + --hash=sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee \ + --hash=sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576 \ + --hash=sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d \ + --hash=sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30 \ + --hash=sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413 \ + --hash=sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb \ + --hash=sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da \ + --hash=sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4 \ + --hash=sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd \ + --hash=sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc \ + 
--hash=sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8 \ + --hash=sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1 \ + --hash=sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc \ + --hash=sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e \ + --hash=sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8 \ + --hash=sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940 \ + --hash=sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400 \ + --hash=sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7 \ + --hash=sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16 \ + --hash=sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278 \ + --hash=sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74 \ + --hash=sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec \ + --hash=sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1 \ + --hash=sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2 \ + --hash=sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c \ + --hash=sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922 \ + --hash=sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a \ + --hash=sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6 \ + --hash=sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1 \ + --hash=sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e \ + --hash=sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac \ + --hash=sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7 # via + # -r requirements.in # gcp-releasetool # secretstorage distlib==0.3.7 \ @@ -145,9 +146,9 @@ gcp-docuploader==0.6.5 \ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==1.16.0 \ - --hash=sha256:27bf19d2e87aaa884096ff941aa3c592c482be3d6a2bfe6f06afafa6af2353e3 \ - --hash=sha256:a316b197a543fd036209d0caba7a8eb4d236d8e65381c80cbc6d7efaa7606d63 +gcp-releasetool==2.0.0 \ + --hash=sha256:3d73480b50ba243f22d7c7ec08b115a30e1c7817c4899781840c26f9c55b8277 \ + --hash=sha256:7aa9fd935ec61e581eb8458ad00823786d91756c25e492f372b2b30962f3c28f # via -r requirements.in google-api-core==2.12.0 \ --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ @@ -251,9 +252,9 @@ googleapis-common-protos==1.61.0 \ --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b # via google-api-core -idna==3.4 \ - --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ - --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 +idna==3.7 \ + --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ + --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 # via requests importlib-metadata==6.8.0 \ --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \ @@ -392,29 +393,18 @@ platformdirs==3.11.0 \ --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ 
--hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e # via virtualenv -protobuf==3.20.3 \ - --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ - --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ - --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \ - --hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \ - --hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \ - --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \ - --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \ - --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \ - --hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \ - --hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \ - --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \ - --hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \ - --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \ - --hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \ - --hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \ - --hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \ - --hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \ - --hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \ - --hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \ - --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ - --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ - --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee +protobuf==4.25.3 \ + --hash=sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4 \ + --hash=sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8 \ + --hash=sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c \ + --hash=sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d \ + --hash=sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4 \ + --hash=sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa \ + --hash=sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c \ + --hash=sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019 \ + --hash=sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9 \ + --hash=sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c \ + --hash=sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2 # via # gcp-docuploader # gcp-releasetool @@ -518,7 +508,7 @@ zipp==3.17.0 \ # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==68.2.2 \ - --hash=sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87 \ - --hash=sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a +setuptools==69.2.0 \ + --hash=sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e \ + --hash=sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c # via -r requirements.in diff --git a/.release-please-manifest.json b/.release-please-manifest.json index b94f3df9f..ab46db83e 
100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.23.0" + ".": "2.23.1" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index ea8a8525d..0731c14a3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-bigtable/#history +## [2.23.1](https://github.com/googleapis/python-bigtable/compare/v2.23.0...v2.23.1) (2024-04-15) + + +### Bug Fixes + +* Use insecure grpc channel with emulator ([#946](https://github.com/googleapis/python-bigtable/issues/946)) ([aa31706](https://github.com/googleapis/python-bigtable/commit/aa3170663f9bd09d70c99d4e76c07f7f293ad935)) + ## [2.23.0](https://github.com/googleapis/python-bigtable/compare/v2.22.0...v2.23.0) (2024-02-07) diff --git a/README.rst b/README.rst index 2bc151e95..69856e05b 100644 --- a/README.rst +++ b/README.rst @@ -21,18 +21,16 @@ Analytics, Maps, and Gmail. .. _Product Documentation: https://cloud.google.com/bigtable/docs -Preview Async Data Client +Async Data Client ------------------------- -:code:`v2.23.0` includes a preview release of the new :code:`BigtableDataClientAsync` client, accessible at the import path +:code:`v2.23.0` includes a release of the new :code:`BigtableDataClientAsync` client, accessible at the import path :code:`google.cloud.bigtable.data`. The new client brings a simplified API and increased performance using asyncio, with a corresponding synchronous surface coming soon. The new client is focused on the data API (i.e. reading and writing Bigtable data), with admin operations remaining in the existing client. -:code:`BigtableDataClientAsync` is currently in preview, and is not recommended for production use. - Feedback and bug reports are welcome at cbt-python-client-v3-feedback@google.com, or through the Github `issue tracker`_. diff --git a/docs/async_data_client.rst b/docs/async_data_client.rst new file mode 100644 index 000000000..7d2901de4 --- /dev/null +++ b/docs/async_data_client.rst @@ -0,0 +1,6 @@ +Bigtable Data Client Async +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. automodule:: google.cloud.bigtable.data._async.client + :members: + :show-inheritance: diff --git a/docs/async_data_exceptions.rst b/docs/async_data_exceptions.rst new file mode 100644 index 000000000..6180ef222 --- /dev/null +++ b/docs/async_data_exceptions.rst @@ -0,0 +1,6 @@ +Custom Exceptions +~~~~~~~~~~~~~~~~~ + +.. automodule:: google.cloud.bigtable.data.exceptions + :members: + :show-inheritance: diff --git a/docs/async_data_mutations.rst b/docs/async_data_mutations.rst new file mode 100644 index 000000000..9d7a9eab2 --- /dev/null +++ b/docs/async_data_mutations.rst @@ -0,0 +1,6 @@ +Mutations +~~~~~~~~~ + +.. automodule:: google.cloud.bigtable.data.mutations + :members: + :show-inheritance: diff --git a/docs/async_data_mutations_batcher.rst b/docs/async_data_mutations_batcher.rst new file mode 100644 index 000000000..3e81f885a --- /dev/null +++ b/docs/async_data_mutations_batcher.rst @@ -0,0 +1,6 @@ +Mutations Batcher Async +~~~~~~~~~~~~~~~~~~~~~~~ + +.. automodule:: google.cloud.bigtable.data._async.mutations_batcher + :members: + :show-inheritance: diff --git a/docs/async_data_read_modify_write_rules.rst b/docs/async_data_read_modify_write_rules.rst new file mode 100644 index 000000000..2f28ddf3f --- /dev/null +++ b/docs/async_data_read_modify_write_rules.rst @@ -0,0 +1,6 @@ +Read Modify Write Rules +~~~~~~~~~~~~~~~~~~~~~~~ + +.. 
automodule:: google.cloud.bigtable.data.read_modify_write_rules + :members: + :show-inheritance: diff --git a/docs/async_data_read_rows_query.rst b/docs/async_data_read_rows_query.rst new file mode 100644 index 000000000..4e3e796d9 --- /dev/null +++ b/docs/async_data_read_rows_query.rst @@ -0,0 +1,6 @@ +Read Rows Query +~~~~~~~~~~~~~~~ + +.. automodule:: google.cloud.bigtable.data.read_rows_query + :members: + :show-inheritance: diff --git a/docs/async_data_row.rst b/docs/async_data_row.rst new file mode 100644 index 000000000..63bc71143 --- /dev/null +++ b/docs/async_data_row.rst @@ -0,0 +1,6 @@ +Rows and Cells +~~~~~~~~~~~~~~ + +.. automodule:: google.cloud.bigtable.data.row + :members: + :show-inheritance: diff --git a/docs/async_data_row_filters.rst b/docs/async_data_row_filters.rst new file mode 100644 index 000000000..22bda8a26 --- /dev/null +++ b/docs/async_data_row_filters.rst @@ -0,0 +1,62 @@ +Bigtable Row Filters +==================== + +It is possible to use a +:class:`RowFilter ` +when constructing a :class:`ReadRowsQuery ` + +The following basic filters +are provided: + +* :class:`SinkFilter <.data.row_filters.SinkFilter>` +* :class:`PassAllFilter <.data.row_filters.PassAllFilter>` +* :class:`BlockAllFilter <.data.row_filters.BlockAllFilter>` +* :class:`RowKeyRegexFilter <.data.row_filters.RowKeyRegexFilter>` +* :class:`RowSampleFilter <.data.row_filters.RowSampleFilter>` +* :class:`FamilyNameRegexFilter <.data.row_filters.FamilyNameRegexFilter>` +* :class:`ColumnQualifierRegexFilter <.data.row_filters.ColumnQualifierRegexFilter>` +* :class:`TimestampRangeFilter <.data.row_filters.TimestampRangeFilter>` +* :class:`ColumnRangeFilter <.data.row_filters.ColumnRangeFilter>` +* :class:`ValueRegexFilter <.data.row_filters.ValueRegexFilter>` +* :class:`ValueRangeFilter <.data.row_filters.ValueRangeFilter>` +* :class:`CellsRowOffsetFilter <.data.row_filters.CellsRowOffsetFilter>` +* :class:`CellsRowLimitFilter <.data.row_filters.CellsRowLimitFilter>` +* :class:`CellsColumnLimitFilter <.data.row_filters.CellsColumnLimitFilter>` +* :class:`StripValueTransformerFilter <.data.row_filters.StripValueTransformerFilter>` +* :class:`ApplyLabelFilter <.data.row_filters.ApplyLabelFilter>` + +In addition, these filters can be combined into composite filters with + +* :class:`RowFilterChain <.data.row_filters.RowFilterChain>` +* :class:`RowFilterUnion <.data.row_filters.RowFilterUnion>` +* :class:`ConditionalRowFilter <.data.row_filters.ConditionalRowFilter>` + +These rules can be nested arbitrarily, with a basic filter at the lowest +level. For example: + +.. code:: python + + # Filter in a specified column (matching any column family). + col1_filter = ColumnQualifierRegexFilter(b'columnbia') + + # Create a filter to label results. + label1 = u'label-red' + label1_filter = ApplyLabelFilter(label1) + + # Combine the filters to label all the cells in columnbia. + chain1 = RowFilterChain(filters=[col1_filter, label1_filter]) + + # Create a similar filter to label cells blue. + col2_filter = ColumnQualifierRegexFilter(b'columnseeya') + label2 = u'label-blue' + label2_filter = ApplyLabelFilter(label2) + chain2 = RowFilterChain(filters=[col2_filter, label2_filter]) + + # Bring our two labeled columns together. + row_filter = RowFilterUnion(filters=[chain1, chain2]) + +---- + +.. 
automodule:: google.cloud.bigtable.data.row_filters + :members: + :show-inheritance: diff --git a/docs/async_data_usage.rst b/docs/async_data_usage.rst new file mode 100644 index 000000000..c436c5988 --- /dev/null +++ b/docs/async_data_usage.rst @@ -0,0 +1,14 @@ +Using the Async Data Client +=========================== + +.. toctree:: + :maxdepth: 2 + + async_data_client + async_data_mutations_batcher + async_data_read_rows_query + async_data_row + async_data_row_filters + async_data_mutations + async_data_read_modify_write_rules + async_data_exceptions diff --git a/docs/data-api.rst b/docs/data-api.rst index 01a49178f..9b50e9ec9 100644 --- a/docs/data-api.rst +++ b/docs/data-api.rst @@ -1,6 +1,13 @@ Data API ======== +.. note:: + This page describes how to use the Data API with the synchronous Bigtable client. + Examples for using the Data API with the async client can be found in the + `Getting Started Guide`_. + +.. _Getting Started Guide: https://cloud.google.com/bigtable/docs/samples-python-hello + After creating a :class:`Table ` and some column families, you are ready to store and retrieve data. diff --git a/docs/index.rst b/docs/index.rst index b1c8f0574..0f04542cc 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -8,6 +8,7 @@ Using the API :maxdepth: 2 usage + async_data_usage API Reference @@ -29,3 +30,8 @@ For a list of all ``google-cloud-datastore`` releases: :maxdepth: 2 changelog + +.. toctree:: + :hidden: + + summary_overview.md diff --git a/docs/summary_overview.md b/docs/summary_overview.md new file mode 100644 index 000000000..2379e8b6b --- /dev/null +++ b/docs/summary_overview.md @@ -0,0 +1,22 @@ +[ +This is a templated file. Adding content to this file may result in it being +reverted. Instead, if you want to place additional content, create an +"overview_content.md" file in `docs/` directory. The Sphinx tool will +pick up on the content and merge the content. +]: # + +# Cloud Bigtable API + +Overview of the APIs available for Cloud Bigtable API. + +## All entries + +Classes, methods and properties & attributes for +Cloud Bigtable API. + +[classes](https://cloud.google.com/python/docs/reference/bigtable/latest/summary_class.html) + +[methods](https://cloud.google.com/python/docs/reference/bigtable/latest/summary_method.html) + +[properties and +attributes](https://cloud.google.com/python/docs/reference/bigtable/latest/summary_property.html) diff --git a/docs/usage.rst b/docs/usage.rst index 73a32b039..de0abac9c 100644 --- a/docs/usage.rst +++ b/docs/usage.rst @@ -1,5 +1,5 @@ -Using the API -============= +Using the Sync Client +===================== .. toctree:: :maxdepth: 2 diff --git a/google/cloud/bigtable/client.py b/google/cloud/bigtable/client.py index c82a268c6..0c89ea562 100644 --- a/google/cloud/bigtable/client.py +++ b/google/cloud/bigtable/client.py @@ -32,7 +32,6 @@ import grpc # type: ignore from google.api_core.gapic_v1 import client_info as client_info_lib -import google.auth # type: ignore from google.auth.credentials import AnonymousCredentials # type: ignore from google.cloud import bigtable_v2 @@ -215,58 +214,21 @@ def _get_scopes(self): return scopes def _emulator_channel(self, transport, options): - """Create a channel using self._credentials + """Create a channel for use with the Bigtable emulator. - Works in a similar way to ``grpc.secure_channel`` but using - ``grpc.local_channel_credentials`` rather than - ``grpc.ssh_channel_credentials`` to allow easy connection to a - local emulator. 
+ Insecure channels are used for the emulator as secure channels + cannot be used to communicate on some environments. + https://github.com/googleapis/python-firestore/issues/359 Returns: grpc.Channel or grpc.aio.Channel """ - # TODO: Implement a special credentials type for emulator and use - # "transport.create_channel" to create gRPC channels once google-auth - # extends it's allowed credentials types. # Note: this code also exists in the firestore client. if "GrpcAsyncIOTransport" in str(transport.__name__): - return grpc.aio.secure_channel( - self._emulator_host, - self._local_composite_credentials(), - options=options, - ) + channel_fn = grpc.aio.insecure_channel else: - return grpc.secure_channel( - self._emulator_host, - self._local_composite_credentials(), - options=options, - ) - - def _local_composite_credentials(self): - """Create credentials for the local emulator channel. - - :return: grpc.ChannelCredentials - """ - credentials = google.auth.credentials.with_scopes_if_required( - self._credentials, None - ) - request = google.auth.transport.requests.Request() - - # Create the metadata plugin for inserting the authorization header. - metadata_plugin = google.auth.transport.grpc.AuthMetadataPlugin( - credentials, request - ) - - # Create a set of grpc.CallCredentials using the metadata plugin. - google_auth_credentials = grpc.metadata_call_credentials(metadata_plugin) - - # Using the local_credentials to allow connection to emulator - local_credentials = grpc.local_channel_credentials() - - # Combine the local credentials and the authorization credentials. - return grpc.composite_channel_credentials( - local_credentials, google_auth_credentials - ) + channel_fn = grpc.insecure_channel + return channel_fn(self._emulator_host, options=options) def _create_gapic_client_channel(self, client_class, grpc_transport): if self._emulator_host is not None: diff --git a/google/cloud/bigtable/data/README.rst b/google/cloud/bigtable/data/README.rst index 7a05cf913..8142cc34d 100644 --- a/google/cloud/bigtable/data/README.rst +++ b/google/cloud/bigtable/data/README.rst @@ -1,7 +1,5 @@ -Async Data Client Preview -========================= - -This new client is currently in preview, and is not recommended for production use. +Async Data Client +================= Synchronous API surface and usage examples coming soon diff --git a/google/cloud/bigtable/data/_async/client.py b/google/cloud/bigtable/data/_async/client.py index ed14c618d..e385ecde7 100644 --- a/google/cloud/bigtable/data/_async/client.py +++ b/google/cloud/bigtable/data/_async/client.py @@ -101,9 +101,6 @@ def __init__( Client should be created within an async context (running event loop) - Warning: BigtableDataClientAsync is currently in preview, and is not - yet recommended for production use. - Args: project: the project which the client acts on behalf of. If not passed, falls back to the default inferred @@ -566,9 +563,6 @@ async def read_rows_stream( Failed requests within operation_timeout will be retried based on the retryable_errors list until operation_timeout is reached. - Warning: BigtableDataClientAsync is currently in preview, and is not - yet recommended for production use. - Args: - query: contains details about which rows to return - operation_timeout: the time budget for the entire operation, in seconds. @@ -620,9 +614,6 @@ async def read_rows( Failed requests within operation_timeout will be retried based on the retryable_errors list until operation_timeout is reached. 
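For context on the emulator change above, a minimal sketch of how the insecure-channel path is typically exercised with the sync client; the project/instance names and the localhost:8086 port are illustrative assumptions, not part of this patch:

import os
from google.cloud import bigtable

# Point the client at a locally running emulator (for example one started with
# `gcloud beta emulators bigtable start`); the host/port value is an assumption.
os.environ["BIGTABLE_EMULATOR_HOST"] = "localhost:8086"

# With the emulator host set, _emulator_channel() now builds its channel with
# grpc.insecure_channel(...) rather than composite local credentials, so no TLS
# or auth metadata is attached to emulator traffic.
client = bigtable.Client(project="test-project", admin=True)
instance = client.instance("test-instance")
print(instance.list_tables())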
- Warning: BigtableDataClientAsync is currently in preview, and is not - yet recommended for production use. - Args: - query: contains details about which rows to return - operation_timeout: the time budget for the entire operation, in seconds. @@ -669,9 +660,6 @@ async def read_row( Failed requests within operation_timeout will be retried based on the retryable_errors list until operation_timeout is reached. - Warning: BigtableDataClientAsync is currently in preview, and is not - yet recommended for production use. - Args: - query: contains details about which rows to return - operation_timeout: the time budget for the entire operation, in seconds. @@ -727,9 +715,6 @@ async def read_rows_sharded( results = await table.read_rows_sharded(shard_queries) ``` - Warning: BigtableDataClientAsync is currently in preview, and is not - yet recommended for production use. - Args: - sharded_query: a sharded query to execute - operation_timeout: the time budget for the entire operation, in seconds. @@ -810,9 +795,6 @@ async def row_exists( Return a boolean indicating whether the specified row exists in the table. uses the filters: chain(limit cells per row = 1, strip value) - Warning: BigtableDataClientAsync is currently in preview, and is not - yet recommended for production use. - Args: - row_key: the key of the row to check - operation_timeout: the time budget for the entire operation, in seconds. @@ -867,9 +849,6 @@ async def sample_row_keys( RowKeySamples is simply a type alias for list[tuple[bytes, int]]; a list of row_keys, along with offset positions in the table - Warning: BigtableDataClientAsync is currently in preview, and is not - yet recommended for production use. - Args: - operation_timeout: the time budget for the entire operation, in seconds. Failed requests will be retried within the budget.i @@ -942,9 +921,6 @@ def mutations_batcher( Can be used to iteratively add mutations that are flushed as a group, to avoid excess network calls - Warning: BigtableDataClientAsync is currently in preview, and is not - yet recommended for production use. - Args: - flush_interval: Automatically flush every flush_interval seconds. If None, a table default will be used @@ -994,9 +970,6 @@ async def mutate_row( Idempotent operations (i.e, all mutations have an explicit timestamp) will be retried on server failure. Non-idempotent operations will not. - Warning: BigtableDataClientAsync is currently in preview, and is not - yet recommended for production use. - Args: - row_key: the row to apply mutations to - mutations: the set of mutations to apply to the row @@ -1077,9 +1050,6 @@ async def bulk_mutate_rows( will be retried on failure. Non-idempotent will not, and will reported in a raised exception group - Warning: BigtableDataClientAsync is currently in preview, and is not - yet recommended for production use. - Args: - mutation_entries: the batches of mutations to apply Each entry will be applied atomically, but entries will be applied @@ -1128,9 +1098,6 @@ async def check_and_mutate_row( Non-idempotent operation: will not be retried - Warning: BigtableDataClientAsync is currently in preview, and is not - yet recommended for production use. - Args: - row_key: the key of the row to mutate - predicate: the filter to be applied to the contents of the specified row. @@ -1150,7 +1117,7 @@ async def check_and_mutate_row( applied to row_key. Entries are applied in order, meaning that earlier mutations can be masked by later ones. 
Must contain at least one entry if - `true_case_mutations is empty, and at most 100000. + `true_case_mutations` is empty, and at most 100000. - operation_timeout: the time budget for the entire operation, in seconds. Failed requests will not be retried. Defaults to the Table's default_operation_timeout Returns: @@ -1199,9 +1166,6 @@ async def read_modify_write_row( Non-idempotent operation: will not be retried - Warning: BigtableDataClientAsync is currently in preview, and is not - yet recommended for production use. - Args: - row_key: the key of the row to apply read/modify/write rules to - rules: A rule or set of rules to apply to the row. diff --git a/google/cloud/bigtable/data/row.py b/google/cloud/bigtable/data/row.py index ecf9cea66..13019cbdd 100644 --- a/google/cloud/bigtable/data/row.py +++ b/google/cloud/bigtable/data/row.py @@ -147,10 +147,12 @@ def __str__(self) -> str: """ Human-readable string representation - { - (family='fam', qualifier=b'col'): [b'value', (+1 more),], - (family='fam', qualifier=b'col2'): [b'other'], - } + .. code-block:: python + + { + (family='fam', qualifier=b'col'): [b'value', (+1 more),], + (family='fam', qualifier=b'col2'): [b'other'], + } """ output = ["{"] for family, qualifier in self._get_column_components(): diff --git a/google/cloud/bigtable/gapic_version.py b/google/cloud/bigtable/gapic_version.py index f01e1d3a5..008f4dd36 100644 --- a/google/cloud/bigtable/gapic_version.py +++ b/google/cloud/bigtable/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.23.0" # {x-release-please-version} +__version__ = "2.23.1" # {x-release-please-version} diff --git a/google/cloud/bigtable_admin/gapic_version.py b/google/cloud/bigtable_admin/gapic_version.py index f01e1d3a5..008f4dd36 100644 --- a/google/cloud/bigtable_admin/gapic_version.py +++ b/google/cloud/bigtable_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.23.0" # {x-release-please-version} +__version__ = "2.23.1" # {x-release-please-version} diff --git a/google/cloud/bigtable_admin_v2/gapic_version.py b/google/cloud/bigtable_admin_v2/gapic_version.py index f01e1d3a5..008f4dd36 100644 --- a/google/cloud/bigtable_admin_v2/gapic_version.py +++ b/google/cloud/bigtable_admin_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.23.0" # {x-release-please-version} +__version__ = "2.23.1" # {x-release-please-version} diff --git a/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/rest.py b/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/rest.py index 61f425953..879702e86 100644 --- a/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/rest.py +++ b/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -905,7 +905,6 @@ def __call__( body = json_format.MessageToJson( transcoded_request["body"], - including_default_value_fields=False, use_integers_for_enums=True, ) uri = transcoded_request["uri"] @@ -915,7 +914,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1005,9 +1003,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1016,7 +1012,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1102,9 +1097,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1113,7 +1106,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1199,7 +1191,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1274,7 +1265,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1349,7 +1339,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1431,7 +1420,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1522,7 +1510,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1680,9 +1667,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1691,7 +1676,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1785,7 +1769,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1876,7 +1859,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1965,7 +1947,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, 
use_integers_for_enums=True, ) ) @@ -2056,7 +2037,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -2145,7 +2125,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -2236,9 +2215,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -2247,7 +2224,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -2339,9 +2315,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -2350,7 +2324,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -2507,9 +2480,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -2518,7 +2489,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -2604,9 +2574,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -2615,7 +2583,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -2707,9 +2674,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -2718,7 +2683,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -2796,9 +2760,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -2807,7 +2769,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - 
including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -2899,9 +2860,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -2910,7 +2869,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) diff --git a/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/rest.py b/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/rest.py index ad171d8f3..49bc756e1 100644 --- a/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/rest.py +++ b/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -998,9 +998,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1009,7 +1007,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1097,9 +1094,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1108,7 +1103,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1196,9 +1190,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1207,7 +1199,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1294,9 +1285,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1305,7 +1294,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1403,9 +1391,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], 
use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1414,7 +1400,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1496,7 +1481,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1578,7 +1562,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1653,7 +1636,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1725,9 +1707,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1736,7 +1716,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1820,9 +1799,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1831,7 +1808,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1919,7 +1895,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -2082,9 +2057,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -2093,7 +2066,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -2201,7 +2173,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -2292,7 +2263,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -2381,7 +2351,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -2484,7 +2453,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -2573,7 +2541,6 @@ def __call__( query_params = json.loads( 
json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -2663,9 +2630,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -2674,7 +2639,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -2762,9 +2726,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -2773,7 +2735,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -2935,9 +2896,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -2946,7 +2905,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -3041,9 +2999,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -3052,7 +3008,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -3141,9 +3096,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -3152,7 +3105,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -3240,9 +3192,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -3251,7 +3201,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -3336,9 +3285,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True 
) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -3347,7 +3294,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -3437,9 +3383,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -3448,7 +3392,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) diff --git a/google/cloud/bigtable_v2/gapic_version.py b/google/cloud/bigtable_v2/gapic_version.py index f01e1d3a5..008f4dd36 100644 --- a/google/cloud/bigtable_v2/gapic_version.py +++ b/google/cloud/bigtable_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.23.0" # {x-release-please-version} +__version__ = "2.23.1" # {x-release-please-version} diff --git a/google/cloud/bigtable_v2/services/bigtable/transports/rest.py b/google/cloud/bigtable_v2/services/bigtable/transports/rest.py index 17b47cb1c..d77291a65 100644 --- a/google/cloud/bigtable_v2/services/bigtable/transports/rest.py +++ b/google/cloud/bigtable_v2/services/bigtable/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -500,9 +500,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -511,7 +509,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -610,9 +607,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -621,7 +616,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -709,9 +703,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -720,7 +712,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -807,9 +798,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -818,7 +807,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -905,9 +893,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -916,7 +902,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1007,9 +992,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1018,7 +1001,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1106,9 +1088,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1117,7 +1097,6 @@ def __call__( query_params = json.loads( 
json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1204,9 +1183,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1215,7 +1192,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1302,7 +1278,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) diff --git a/samples/beam/noxfile_config.py b/samples/beam/noxfile_config.py index eb01435a0..66d7bc5ac 100644 --- a/samples/beam/noxfile_config.py +++ b/samples/beam/noxfile_config.py @@ -23,8 +23,8 @@ TEST_CONFIG_OVERRIDE = { # You can opt out from the test for specific Python versions. "ignored_versions": [ - "2.7", # not supported - "3.10", # Beam wheels not yet released for Python 3.10 + "3.7", # Beam no longer supports Python 3.7 for new releases + "3.12", # Beam not yet supported for Python 3.12 ], # Old samples are opted out of enforcing Python type hints # All new samples should feature them diff --git a/samples/beam/requirements-test.txt b/samples/beam/requirements-test.txt index 8075a1ec5..cb87efc0f 100644 --- a/samples/beam/requirements-test.txt +++ b/samples/beam/requirements-test.txt @@ -1 +1 @@ -pytest==8.0.0 +pytest==7.4.4 diff --git a/samples/beam/requirements.txt b/samples/beam/requirements.txt index 70b1371ae..86e305c22 100644 --- a/samples/beam/requirements.txt +++ b/samples/beam/requirements.txt @@ -1,3 +1,3 @@ -apache-beam==2.53.0 +apache-beam==2.54.0 google-cloud-bigtable==2.22.0 google-cloud-core==2.4.1 diff --git a/samples/hello/async_main.py b/samples/hello/async_main.py new file mode 100644 index 000000000..d608bb073 --- /dev/null +++ b/samples/hello/async_main.py @@ -0,0 +1,140 @@ +#!/usr/bin/env python + +# Copyright 2024 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Demonstrates how to connect to Cloud Bigtable and run some basic operations with the async APIs + +Prerequisites: + +- Create a Cloud Bigtable instance. + https://cloud.google.com/bigtable/docs/creating-instance +- Set your Google Application Default Credentials. 
+ https://developers.google.com/identity/protocols/application-default-credentials +""" + +import argparse +import asyncio + +# [START bigtable_async_hw_imports] +from google.cloud import bigtable +from google.cloud.bigtable.data import row_filters +from google.cloud.bigtable.data import RowMutationEntry +from google.cloud.bigtable.data import SetCell +from google.cloud.bigtable.data import ReadRowsQuery + +# [END bigtable_async_hw_imports] + + +async def main(project_id, instance_id, table_id): + # [START bigtable_async_hw_connect] + client = bigtable.data.BigtableDataClientAsync(project=project_id) + table = client.get_table(instance_id, table_id) + # [END bigtable_async_hw_connect] + + # [START bigtable_async_hw_create_table] + from google.cloud.bigtable import column_family + + # the async client only supports the data API. Table creation as an admin operation + # use admin client to create the table + print("Creating the {} table.".format(table_id)) + admin_client = bigtable.Client(project=project_id, admin=True) + admin_instance = admin_client.instance(instance_id) + admin_table = admin_instance.table(table_id) + + print("Creating column family cf1 with Max Version GC rule...") + # Create a column family with GC policy : most recent N versions + # Define the GC policy to retain only the most recent 2 versions + max_versions_rule = column_family.MaxVersionsGCRule(2) + column_family_id = "cf1" + column_families = {column_family_id: max_versions_rule} + if not admin_table.exists(): + admin_table.create(column_families=column_families) + else: + print("Table {} already exists.".format(table_id)) + # [END bigtable_async_hw_create_table] + + # [START bigtable_async_hw_write_rows] + print("Writing some greetings to the table.") + greetings = ["Hello World!", "Hello Cloud Bigtable!", "Hello Python!"] + mutations = [] + column = "greeting" + for i, value in enumerate(greetings): + # Note: This example uses sequential numeric IDs for simplicity, + # but this can result in poor performance in a production + # application. Since rows are stored in sorted order by key, + # sequential keys can result in poor distribution of operations + # across nodes. + # + # For more information about how to design a Bigtable schema for + # the best performance, see the documentation: + # + # https://cloud.google.com/bigtable/docs/schema-design + row_key = "greeting{}".format(i).encode() + row_mutation = RowMutationEntry( + row_key, SetCell(column_family_id, column, value) + ) + mutations.append(row_mutation) + await table.bulk_mutate_rows(mutations) + # [END bigtable_async_hw_write_rows] + + # [START bigtable_async_hw_create_filter] + # Create a filter to only retrieve the most recent version of the cell + # for each column across entire row. 
+ row_filter = row_filters.CellsColumnLimitFilter(1) + # [END bigtable_async_hw_create_filter] + + # [START bigtable_async_hw_get_with_filter] + # [START bigtable_async_hw_get_by_key] + print("Getting a single greeting by row key.") + key = "greeting0".encode() + + row = await table.read_row(key, row_filter=row_filter) + cell = row.cells[0] + print(cell.value.decode("utf-8")) + # [END bigtable_async_hw_get_by_key] + # [END bigtable_async_hw_get_with_filter] + + # [START bigtable_async_hw_scan_with_filter] + # [START bigtable_async_hw_scan_all] + print("Scanning for all greetings:") + query = ReadRowsQuery(row_filter=row_filter) + async for row in await table.read_rows_stream(query): + cell = row.cells[0] + print(cell.value.decode("utf-8")) + # [END bigtable_async_hw_scan_all] + # [END bigtable_async_hw_scan_with_filter] + + # [START bigtable_async_hw_delete_table] + # the async client only supports the data API. Table deletion as an admin operation + # use admin client to create the table + print("Deleting the {} table.".format(table_id)) + admin_table.delete() + # [END bigtable_async_hw_delete_table] + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter + ) + parser.add_argument("project_id", help="Your Cloud Platform project ID.") + parser.add_argument( + "instance_id", help="ID of the Cloud Bigtable instance to connect to." + ) + parser.add_argument( + "--table", help="Table to create and destroy.", default="Hello-Bigtable" + ) + + args = parser.parse_args() + asyncio.run(main(args.project_id, args.instance_id, args.table)) diff --git a/samples/hello/async_main_test.py b/samples/hello/async_main_test.py new file mode 100644 index 000000000..a47ac2d33 --- /dev/null +++ b/samples/hello/async_main_test.py @@ -0,0 +1,39 @@ +# Copyright 2024 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import random +import asyncio + +from async_main import main + +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] +BIGTABLE_INSTANCE = os.environ["BIGTABLE_INSTANCE"] +TABLE_NAME_FORMAT = "hello-world-test-{}" +TABLE_NAME_RANGE = 10000 + + +def test_async_main(capsys): + table_name = TABLE_NAME_FORMAT.format(random.randrange(TABLE_NAME_RANGE)) + + asyncio.run(main(PROJECT, BIGTABLE_INSTANCE, table_name)) + + out, _ = capsys.readouterr() + assert "Creating the {} table.".format(table_name) in out + assert "Writing some greetings to the table." in out + assert "Getting a single greeting by row key." in out + assert "Hello World!" in out + assert "Scanning for all greetings" in out + assert "Hello Cloud Bigtable!" in out + assert "Deleting the {} table.".format(table_name) in out diff --git a/samples/hello/main.py b/samples/hello/main.py index 5e47b4a38..3b7de34b0 100644 --- a/samples/hello/main.py +++ b/samples/hello/main.py @@ -18,8 +18,8 @@ Prerequisites: -- Create a Cloud Bigtable cluster. - https://cloud.google.com/bigtable/docs/creating-cluster +- Create a Cloud Bigtable instance. 
+ https://cloud.google.com/bigtable/docs/creating-instance - Set your Google Application Default Credentials. https://developers.google.com/identity/protocols/application-default-credentials """ diff --git a/samples/hello/requirements-test.txt b/samples/hello/requirements-test.txt index 8075a1ec5..cb87efc0f 100644 --- a/samples/hello/requirements-test.txt +++ b/samples/hello/requirements-test.txt @@ -1 +1 @@ -pytest==8.0.0 +pytest==7.4.4 diff --git a/samples/hello/requirements.txt b/samples/hello/requirements.txt index 68419fbcb..dd4fc1fb3 100644 --- a/samples/hello/requirements.txt +++ b/samples/hello/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-bigtable==2.22.0 +google-cloud-bigtable==2.23.0 google-cloud-core==2.4.1 diff --git a/samples/hello_happybase/requirements-test.txt b/samples/hello_happybase/requirements-test.txt index 8075a1ec5..cb87efc0f 100644 --- a/samples/hello_happybase/requirements-test.txt +++ b/samples/hello_happybase/requirements-test.txt @@ -1 +1 @@ -pytest==8.0.0 +pytest==7.4.4 diff --git a/samples/instanceadmin/requirements-test.txt b/samples/instanceadmin/requirements-test.txt index 8075a1ec5..cb87efc0f 100644 --- a/samples/instanceadmin/requirements-test.txt +++ b/samples/instanceadmin/requirements-test.txt @@ -1 +1 @@ -pytest==8.0.0 +pytest==7.4.4 diff --git a/samples/metricscaler/requirements-test.txt b/samples/metricscaler/requirements-test.txt index 8b8270b6c..c0d4f7003 100644 --- a/samples/metricscaler/requirements-test.txt +++ b/samples/metricscaler/requirements-test.txt @@ -1,3 +1,3 @@ -pytest==8.0.0 +pytest==7.4.4 mock==5.1.0 google-cloud-testutils diff --git a/samples/quickstart/requirements-test.txt b/samples/quickstart/requirements-test.txt index 8075a1ec5..cb87efc0f 100644 --- a/samples/quickstart/requirements-test.txt +++ b/samples/quickstart/requirements-test.txt @@ -1 +1 @@ -pytest==8.0.0 +pytest==7.4.4 diff --git a/samples/quickstart_happybase/requirements-test.txt b/samples/quickstart_happybase/requirements-test.txt index 8075a1ec5..cb87efc0f 100644 --- a/samples/quickstart_happybase/requirements-test.txt +++ b/samples/quickstart_happybase/requirements-test.txt @@ -1 +1 @@ -pytest==8.0.0 +pytest==7.4.4 diff --git a/samples/snippets/data_client/data_client_snippets_async.py b/samples/snippets/data_client/data_client_snippets_async.py new file mode 100644 index 000000000..cb51bdc78 --- /dev/null +++ b/samples/snippets/data_client/data_client_snippets_async.py @@ -0,0 +1,234 @@ +#!/usr/bin/env python + +# Copyright 2024, Google LLC +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
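A minimal usage sketch for the snippet coroutines defined below, assuming an existing Bigtable instance and table; the helper name and the project/instance/table IDs are placeholders, and only the async data client API that the snippets themselves import is used:

    import asyncio
    from google.cloud.bigtable.data import BigtableDataClientAsync

    async def run_one_snippet(project_id, instance_id, table_id):
        # Open the async data client, acquire a table handle, and pass it to
        # one of the snippet coroutines below (write_simple, read_row, ...).
        async with BigtableDataClientAsync(project=project_id) as client:
            async with client.get_table(instance_id, table_id) as table:
                await write_simple(table)

    # Example invocation (placeholder IDs):
    # asyncio.run(run_one_snippet("my-project", "my-instance", "my-table"))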
+ + +async def write_simple(table): + # [START bigtable_async_write_simple] + from google.cloud.bigtable.data import BigtableDataClientAsync + from google.cloud.bigtable.data import SetCell + + async def write_simple(project_id, instance_id, table_id): + async with BigtableDataClientAsync(project=project_id) as client: + async with client.get_table(instance_id, table_id) as table: + family_id = "stats_summary" + row_key = b"phone#4c410523#20190501" + + cell_mutation = SetCell(family_id, "connected_cell", 1) + wifi_mutation = SetCell(family_id, "connected_wifi", 1) + os_mutation = SetCell(family_id, "os_build", "PQ2A.190405.003") + + await table.mutate_row(row_key, cell_mutation) + await table.mutate_row(row_key, wifi_mutation) + await table.mutate_row(row_key, os_mutation) + + # [END bigtable_async_write_simple] + await write_simple(table.client.project, table.instance_id, table.table_id) + + +async def write_batch(table): + # [START bigtable_async_writes_batch] + from google.cloud.bigtable.data import BigtableDataClientAsync + from google.cloud.bigtable.data.mutations import SetCell + from google.cloud.bigtable.data.mutations import RowMutationEntry + + async def write_batch(project_id, instance_id, table_id): + async with BigtableDataClientAsync(project=project_id) as client: + async with client.get_table(instance_id, table_id) as table: + family_id = "stats_summary" + + async with table.mutations_batcher() as batcher: + mutation_list = [ + SetCell(family_id, "connected_cell", 1), + SetCell(family_id, "connected_wifi", 1), + SetCell(family_id, "os_build", "12155.0.0-rc1"), + ] + batcher.append( + RowMutationEntry("tablet#a0b81f74#20190501", mutation_list) + ) + batcher.append( + RowMutationEntry("tablet#a0b81f74#20190502", mutation_list) + ) + # [END bigtable_async_writes_batch] + await write_batch(table.client.project, table.instance_id, table.table_id) + + +async def write_increment(table): + # [START bigtable_async_write_increment] + from google.cloud.bigtable.data import BigtableDataClientAsync + from google.cloud.bigtable.data.read_modify_write_rules import IncrementRule + + async def write_increment(project_id, instance_id, table_id): + async with BigtableDataClientAsync(project=project_id) as client: + async with client.get_table(instance_id, table_id) as table: + family_id = "stats_summary" + row_key = "phone#4c410523#20190501" + + # Decrement the connected_wifi value by 1. 
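    # Note: the increment below is applied server-side and atomically. Bigtable
    # reads the existing cell as a 64-bit big-endian signed integer (the
    # encoding used when an int such as 1 is written via SetCell in
    # write_simple above), and a negative increment_amount performs a decrement.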
+ increment_rule = IncrementRule( + family_id, "connected_wifi", increment_amount=-1 + ) + result_row = await table.read_modify_write_row(row_key, increment_rule) + + # check result + cell = result_row[0] + print(f"{cell.row_key} value: {int(cell)}") + # [END bigtable_async_write_increment] + await write_increment(table.client.project, table.instance_id, table.table_id) + + +async def write_conditional(table): + # [START bigtable_async_writes_conditional] + from google.cloud.bigtable.data import BigtableDataClientAsync + from google.cloud.bigtable.data import row_filters + from google.cloud.bigtable.data import SetCell + + async def write_conditional(project_id, instance_id, table_id): + async with BigtableDataClientAsync(project=project_id) as client: + async with client.get_table(instance_id, table_id) as table: + family_id = "stats_summary" + row_key = "phone#4c410523#20190501" + + row_filter = row_filters.RowFilterChain( + filters=[ + row_filters.FamilyNameRegexFilter(family_id), + row_filters.ColumnQualifierRegexFilter("os_build"), + row_filters.ValueRegexFilter("PQ2A\\..*"), + ] + ) + + if_true = SetCell(family_id, "os_name", "android") + result = await table.check_and_mutate_row( + row_key, + row_filter, + true_case_mutations=if_true, + false_case_mutations=None, + ) + if result is True: + print("The row os_name was set to android") + # [END bigtable_async_writes_conditional] + await write_conditional(table.client.project, table.instance_id, table.table_id) + + +async def read_row(table): + # [START bigtable_async_reads_row] + from google.cloud.bigtable.data import BigtableDataClientAsync + + async def read_row(project_id, instance_id, table_id): + async with BigtableDataClientAsync(project=project_id) as client: + async with client.get_table(instance_id, table_id) as table: + row_key = "phone#4c410523#20190501" + row = await table.read_row(row_key) + print(row) + # [END bigtable_async_reads_row] + await read_row(table.client.project, table.instance_id, table.table_id) + + +async def read_row_partial(table): + # [START bigtable_async_reads_row_partial] + from google.cloud.bigtable.data import BigtableDataClientAsync + from google.cloud.bigtable.data import row_filters + + async def read_row_partial(project_id, instance_id, table_id): + async with BigtableDataClientAsync(project=project_id) as client: + async with client.get_table(instance_id, table_id) as table: + row_key = "phone#4c410523#20190501" + col_filter = row_filters.ColumnQualifierRegexFilter(b"os_build") + + row = await table.read_row(row_key, row_filter=col_filter) + print(row) + # [END bigtable_async_reads_row_partial] + await read_row_partial(table.client.project, table.instance_id, table.table_id) + + +async def read_rows_multiple(table): + # [START bigtable_async_reads_rows] + from google.cloud.bigtable.data import BigtableDataClientAsync + from google.cloud.bigtable.data import ReadRowsQuery + + async def read_rows(project_id, instance_id, table_id): + async with BigtableDataClientAsync(project=project_id) as client: + async with client.get_table(instance_id, table_id) as table: + + query = ReadRowsQuery(row_keys=[ + b"phone#4c410523#20190501", + b"phone#4c410523#20190502" + ]) + async for row in await table.read_rows_stream(query): + print(row) + + # [END bigtable_async_reads_rows] + await read_rows(table.client.project, table.instance_id, table.table_id) + + +async def read_row_range(table): + # [START bigtable_async_reads_row_range] + from google.cloud.bigtable.data import BigtableDataClientAsync + from 
google.cloud.bigtable.data import ReadRowsQuery + from google.cloud.bigtable.data import RowRange + + async def read_row_range(project_id, instance_id, table_id): + async with BigtableDataClientAsync(project=project_id) as client: + async with client.get_table(instance_id, table_id) as table: + + row_range = RowRange( + start_key=b"phone#4c410523#20190501", + end_key=b"phone#4c410523#201906201" + ) + query = ReadRowsQuery(row_ranges=[row_range]) + + async for row in await table.read_rows_stream(query): + print(row) + # [END bigtable_async_reads_row_range] + await read_row_range(table.client.project, table.instance_id, table.table_id) + + +async def read_with_prefix(table): + # [START bigtable_async_reads_prefix] + from google.cloud.bigtable.data import BigtableDataClientAsync + from google.cloud.bigtable.data import ReadRowsQuery + from google.cloud.bigtable.data import RowRange + + async def read_prefix(project_id, instance_id, table_id): + async with BigtableDataClientAsync(project=project_id) as client: + async with client.get_table(instance_id, table_id) as table: + + prefix = "phone#" + end_key = prefix[:-1] + chr(ord(prefix[-1]) + 1) + prefix_range = RowRange(start_key=prefix, end_key=end_key) + query = ReadRowsQuery(row_ranges=[prefix_range]) + + async for row in await table.read_rows_stream(query): + print(row) + # [END bigtable_async_reads_prefix] + await read_prefix(table.client.project, table.instance_id, table.table_id) + + +async def read_with_filter(table): + # [START bigtable_async_reads_filter] + from google.cloud.bigtable.data import BigtableDataClientAsync + from google.cloud.bigtable.data import ReadRowsQuery + from google.cloud.bigtable.data import row_filters + + async def read_with_filter(project_id, instance_id, table_id): + async with BigtableDataClientAsync(project=project_id) as client: + async with client.get_table(instance_id, table_id) as table: + + row_filter = row_filters.ValueRegexFilter(b"PQ2A.*$") + query = ReadRowsQuery(row_filter=row_filter) + + async for row in await table.read_rows_stream(query): + print(row) + # [END bigtable_async_reads_filter] + await read_with_filter(table.client.project, table.instance_id, table.table_id) diff --git a/samples/snippets/data_client/data_client_snippets_async_test.py b/samples/snippets/data_client/data_client_snippets_async_test.py new file mode 100644 index 000000000..d9968e6dc --- /dev/null +++ b/samples/snippets/data_client/data_client_snippets_async_test.py @@ -0,0 +1,103 @@ +# Copyright 2024, Google LLC +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
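The tests that follow drive the async snippets through pytest-asyncio. A minimal sketch of the pattern they rely on, assuming pytest and pytest-asyncio are installed (see the data_client requirements-test.txt added further down); the coroutine here is a stand-in rather than one of the real sample tests:

    import pytest

    @pytest.mark.asyncio
    async def test_sketch():
        # pytest-asyncio collects coroutine tests and runs them on an event loop.
        async def compute():
            return 41 + 1
        assert await compute() == 42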
+import pytest +import pytest_asyncio +import uuid +import os + +import data_client_snippets_async as data_snippets + + +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] +BIGTABLE_INSTANCE = os.environ["BIGTABLE_INSTANCE"] +TABLE_ID_STATIC = os.getenv( + "BIGTABLE_TABLE", None +) # if not set, a temproary table will be generated + + +@pytest.fixture(scope="session") +def table_id(): + from google.cloud import bigtable + + client = bigtable.Client(project=PROJECT, admin=True) + instance = client.instance(BIGTABLE_INSTANCE) + table_id = TABLE_ID_STATIC or f"data-client-{str(uuid.uuid4())[:16]}" + + admin_table = instance.table(table_id) + if not admin_table.exists(): + admin_table.create(column_families={"family": None, "stats_summary": None}) + + yield table_id + + if not table_id == TABLE_ID_STATIC: + # clean up table when finished + admin_table.delete() + + +@pytest_asyncio.fixture +async def table(table_id): + from google.cloud.bigtable.data import BigtableDataClientAsync + + async with BigtableDataClientAsync(project=PROJECT) as client: + async with client.get_table(BIGTABLE_INSTANCE, table_id) as table: + yield table + + +@pytest.mark.asyncio +async def test_write_simple(table): + await data_snippets.write_simple(table) + + +@pytest.mark.asyncio +async def test_write_batch(table): + await data_snippets.write_batch(table) + + +@pytest.mark.asyncio +async def test_write_increment(table): + await data_snippets.write_increment(table) + + +@pytest.mark.asyncio +async def test_write_conditional(table): + await data_snippets.write_conditional(table) + + +@pytest.mark.asyncio +async def test_read_row(table): + await data_snippets.read_row(table) + + +@pytest.mark.asyncio +async def test_read_row_partial(table): + await data_snippets.read_row_partial(table) + + +@pytest.mark.asyncio +async def test_read_rows_multiple(table): + await data_snippets.read_rows_multiple(table) + + +@pytest.mark.asyncio +async def test_read_row_range(table): + await data_snippets.read_row_range(table) + + +@pytest.mark.asyncio +async def test_read_with_prefix(table): + await data_snippets.read_with_prefix(table) + + +@pytest.mark.asyncio +async def test_read_with_filter(table): + await data_snippets.read_with_filter(table) diff --git a/samples/snippets/data_client/noxfile.py b/samples/snippets/data_client/noxfile.py new file mode 100644 index 000000000..483b55901 --- /dev/null +++ b/samples/snippets/data_client/noxfile.py @@ -0,0 +1,292 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import glob +import os +from pathlib import Path +import sys +from typing import Callable, Dict, Optional + +import nox + + +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING +# DO NOT EDIT THIS FILE EVER! 
+# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" + +# Copy `noxfile_config.py` to your directory and modify it instead. + +# `TEST_CONFIG` dict is a configuration hook that allows users to +# modify the test configurations. The values here should be in sync +# with `noxfile_config.py`. Users will copy `noxfile_config.py` into +# their directory and modify it. + +TEST_CONFIG = { + # You can opt out from the test for specific Python versions. + "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + "envs": {}, +} + + +try: + # Ensure we can import noxfile_config in the project's directory. + sys.path.append(".") + from noxfile_config import TEST_CONFIG_OVERRIDE +except ImportError as e: + print("No user noxfile_config found: detail: {}".format(e)) + TEST_CONFIG_OVERRIDE = {} + +# Update the TEST_CONFIG with the user supplied values. +TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) + + +def get_pytest_env_vars() -> Dict[str, str]: + """Returns a dict for pytest invocation.""" + ret = {} + + # Override the GCLOUD_PROJECT and the alias. + env_key = TEST_CONFIG["gcloud_project_env"] + # This should error out if not set. + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + + # Apply user supplied envs. + ret.update(TEST_CONFIG["envs"]) + return ret + + +# DO NOT EDIT - automatically generated. +# All versions used to test samples. +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] + +# Any default versions that should be ignored. +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] + +TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) + +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + +# +# Style Checks +# + + +# Linting with flake8. 
+# +# We ignore the following rules: +# E203: whitespace before ‘:’ +# E266: too many leading ‘#’ for block comment +# E501: line too long +# I202: Additional newline in a section of imports +# +# We also need to specify the rules which are ignored by default: +# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] +FLAKE8_COMMON_ARGS = [ + "--show-source", + "--builtin=gettext", + "--max-complexity=20", + "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", + "--max-line-length=88", +] + + +@nox.session +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG["enforce_type_hints"]: + session.install("flake8") + else: + session.install("flake8", "flake8-annotations") + + args = FLAKE8_COMMON_ARGS + [ + ".", + ] + session.run("flake8", *args) + + +# +# Black +# + + +@nox.session +def blacken(session: nox.sessions.Session) -> None: + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + +# +# format = isort + black +# + +@nox.session +def format(session: nox.sessions.Session) -> None: + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run("isort", "--fss", *python_files) + session.run("black", *python_files) + + +# +# Sample Tests +# + + +PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] + + +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: + # check for presence of tests + test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob("**/test_*.py", recursive=True) + test_list.extend(glob.glob("**/tests", recursive=True)) + + if len(test_list) == 0: + print("No tests found, skipping directory.") + return + + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + concurrent_args = [] + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + with open("requirements.txt") as rfile: + packages = rfile.read() + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + with open("requirements-test.txt") as rtfile: + packages += rtfile.read() + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + if "pytest-parallel" in packages: + concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + elif "pytest-xdist" in packages: + concurrent_args.extend(['-n', 'auto']) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. 
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) + + +@nox.session(python=ALL_VERSIONS) +def py(session: nox.sessions.Session) -> None: + """Runs py.test for a sample using the specified version of Python.""" + if session.python in TESTED_VERSIONS: + _session_tests(session) + else: + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) + + +# +# Readmegen +# + + +def _get_repo_root() -> Optional[str]: + """ Returns the root folder of the project. """ + # Get root of this repository. Assume we don't have directories nested deeper than 10 items. + p = Path(os.getcwd()) + for i in range(10): + if p is None: + break + if Path(p / ".git").exists(): + return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) + p = p.parent + raise Exception("Unable to detect repository root.") + + +GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) + + +@nox.session +@nox.parametrize("path", GENERATED_READMES) +def readmegen(session: nox.sessions.Session, path: str) -> None: + """(Re-)generates the readme for a sample.""" + session.install("jinja2", "pyyaml") + dir_ = os.path.dirname(path) + + if os.path.exists(os.path.join(dir_, "requirements.txt")): + session.install("-r", os.path.join(dir_, "requirements.txt")) + + in_file = os.path.join(dir_, "README.rst.in") + session.run( + "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file + ) diff --git a/samples/snippets/data_client/requirements-test.txt b/samples/snippets/data_client/requirements-test.txt new file mode 100644 index 000000000..5cb431d92 --- /dev/null +++ b/samples/snippets/data_client/requirements-test.txt @@ -0,0 +1,2 @@ +pytest==7.4.4 +pytest-asyncio diff --git a/samples/snippets/data_client/requirements.txt b/samples/snippets/data_client/requirements.txt new file mode 100644 index 000000000..835e1bc78 --- /dev/null +++ b/samples/snippets/data_client/requirements.txt @@ -0,0 +1 @@ +google-cloud-bigtable==2.23.0 diff --git a/samples/snippets/deletes/deletes_snippets.py b/samples/snippets/deletes/deletes_snippets.py index 8e78083bf..72f812ca2 100644 --- a/samples/snippets/deletes/deletes_snippets.py +++ b/samples/snippets/deletes/deletes_snippets.py @@ -37,9 +37,7 @@ def delete_from_column_family(project_id, instance_id, table_id): instance = client.instance(instance_id) table = instance.table(table_id) row = table.row("phone#4c410523#20190501") - row.delete_cells( - column_family_id="cell_plan", columns=row.ALL_COLUMNS - ) + row.delete_cells(column_family_id="cell_plan", columns=row.ALL_COLUMNS) row.commit() diff --git a/samples/snippets/deletes/deletes_test.py b/samples/snippets/deletes/deletes_test.py index bf23daa59..bebaabafb 100644 --- a/samples/snippets/deletes/deletes_test.py +++ b/samples/snippets/deletes/deletes_test.py @@ -1,4 +1,5 @@ # Copyright 2020, Google LLC + # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at @@ -94,46 +95,46 @@ def table_id(): yield table_id -def assert_snapshot_match(capsys, snapshot): +def assert_output_match(capsys, expected): out, _ = capsys.readouterr() - snapshot.assert_match(out) + assert out == expected -def test_delete_from_column(capsys, snapshot, table_id): +def test_delete_from_column(capsys, table_id): deletes_snippets.delete_from_column(PROJECT, BIGTABLE_INSTANCE, table_id) - assert_snapshot_match(capsys, snapshot) + assert_output_match(capsys, "") -def test_delete_from_column_family(capsys, snapshot, table_id): +def test_delete_from_column_family(capsys, table_id): deletes_snippets.delete_from_column_family(PROJECT, BIGTABLE_INSTANCE, table_id) - assert_snapshot_match(capsys, snapshot) + assert_output_match(capsys, "") -def test_delete_from_row(capsys, snapshot, table_id): +def test_delete_from_row(capsys, table_id): deletes_snippets.delete_from_row(PROJECT, BIGTABLE_INSTANCE, table_id) - assert_snapshot_match(capsys, snapshot) + assert_output_match(capsys, "") -def test_streaming_and_batching(capsys, snapshot, table_id): +def test_streaming_and_batching(capsys, table_id): deletes_snippets.streaming_and_batching(PROJECT, BIGTABLE_INSTANCE, table_id) - assert_snapshot_match(capsys, snapshot) + assert_output_match(capsys, "") -def test_check_and_mutate(capsys, snapshot, table_id): +def test_check_and_mutate(capsys, table_id): deletes_snippets.check_and_mutate(PROJECT, BIGTABLE_INSTANCE, table_id) - assert_snapshot_match(capsys, snapshot) + assert_output_match(capsys, "") -def test_drop_row_range(capsys, snapshot, table_id): +def test_drop_row_range(capsys, table_id): deletes_snippets.drop_row_range(PROJECT, BIGTABLE_INSTANCE, table_id) - assert_snapshot_match(capsys, snapshot) + assert_output_match(capsys, "") -def test_delete_column_family(capsys, snapshot, table_id): +def test_delete_column_family(capsys, table_id): deletes_snippets.delete_column_family(PROJECT, BIGTABLE_INSTANCE, table_id) - assert_snapshot_match(capsys, snapshot) + assert_output_match(capsys, "") -def test_delete_table(capsys, snapshot, table_id): +def test_delete_table(capsys, table_id): deletes_snippets.delete_table(PROJECT, BIGTABLE_INSTANCE, table_id) - assert_snapshot_match(capsys, snapshot) + assert_output_match(capsys, "") diff --git a/samples/snippets/deletes/requirements-test.txt b/samples/snippets/deletes/requirements-test.txt index 8075a1ec5..cb87efc0f 100644 --- a/samples/snippets/deletes/requirements-test.txt +++ b/samples/snippets/deletes/requirements-test.txt @@ -1 +1 @@ -pytest==8.0.0 +pytest==7.4.4 diff --git a/samples/snippets/deletes/requirements.txt b/samples/snippets/deletes/requirements.txt index ae10593d2..6dc985893 100644 --- a/samples/snippets/deletes/requirements.txt +++ b/samples/snippets/deletes/requirements.txt @@ -1,2 +1 @@ google-cloud-bigtable==2.22.0 -snapshottest==0.6.0 \ No newline at end of file diff --git a/samples/snippets/deletes/snapshots/snap_deletes_test.py b/samples/snippets/deletes/snapshots/snap_deletes_test.py deleted file mode 100644 index 04a7db940..000000000 --- a/samples/snippets/deletes/snapshots/snap_deletes_test.py +++ /dev/null @@ -1,24 +0,0 @@ -# -*- coding: utf-8 -*- -# snapshottest: v1 - https://goo.gl/zC4yUc -from __future__ import unicode_literals - -from snapshottest import Snapshot - - -snapshots = Snapshot() - -snapshots['test_check_and_mutate 1'] = '' - -snapshots['test_delete_column_family 1'] = '' - -snapshots['test_delete_from_column 1'] = '' - -snapshots['test_delete_from_column_family 
1'] = '' - -snapshots['test_delete_from_row 1'] = '' - -snapshots['test_delete_table 1'] = '' - -snapshots['test_drop_row_range 1'] = '' - -snapshots['test_streaming_and_batching 1'] = '' diff --git a/samples/snippets/deletes/snapshots/__init__.py b/samples/snippets/filters/__init__.py similarity index 100% rename from samples/snippets/deletes/snapshots/__init__.py rename to samples/snippets/filters/__init__.py diff --git a/samples/snippets/filters/filters_test.py b/samples/snippets/filters/filters_test.py index 35cf62ff0..aedd8f08d 100644 --- a/samples/snippets/filters/filters_test.py +++ b/samples/snippets/filters/filters_test.py @@ -16,11 +16,13 @@ import os import time import uuid +import inspect from google.cloud import bigtable import pytest +from .snapshots.snap_filters_test import snapshots -import filter_snippets +from . import filter_snippets PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] @@ -97,131 +99,148 @@ def table_id(): table.delete() -def test_filter_limit_row_sample(capsys, snapshot, table_id): +def test_filter_limit_row_sample(capsys, table_id): filter_snippets.filter_limit_row_sample(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() assert "Reading data for" in out -def test_filter_limit_row_regex(capsys, snapshot, table_id): +def test_filter_limit_row_regex(capsys, table_id): filter_snippets.filter_limit_row_regex(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_limit_cells_per_col(capsys, snapshot, table_id): +def test_filter_limit_cells_per_col(capsys, table_id): filter_snippets.filter_limit_cells_per_col(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_limit_cells_per_row(capsys, snapshot, table_id): +def test_filter_limit_cells_per_row(capsys, table_id): filter_snippets.filter_limit_cells_per_row(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_limit_cells_per_row_offset(capsys, snapshot, table_id): +def test_filter_limit_cells_per_row_offset(capsys, table_id): filter_snippets.filter_limit_cells_per_row_offset( PROJECT, BIGTABLE_INSTANCE, table_id ) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_limit_col_family_regex(capsys, snapshot, table_id): +def test_filter_limit_col_family_regex(capsys, table_id): filter_snippets.filter_limit_col_family_regex(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_limit_col_qualifier_regex(capsys, snapshot, table_id): +def test_filter_limit_col_qualifier_regex(capsys, table_id): filter_snippets.filter_limit_col_qualifier_regex( PROJECT, BIGTABLE_INSTANCE, table_id ) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_limit_col_range(capsys, snapshot, table_id): +def test_filter_limit_col_range(capsys, table_id): filter_snippets.filter_limit_col_range(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = 
capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_limit_value_range(capsys, snapshot, table_id): +def test_filter_limit_value_range(capsys, table_id): filter_snippets.filter_limit_value_range(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_limit_value_regex(capsys, snapshot, table_id): +def test_filter_limit_value_regex(capsys, table_id): filter_snippets.filter_limit_value_regex(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_limit_timestamp_range(capsys, snapshot, table_id): +def test_filter_limit_timestamp_range(capsys, table_id): filter_snippets.filter_limit_timestamp_range(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_limit_block_all(capsys, snapshot, table_id): +def test_filter_limit_block_all(capsys, table_id): filter_snippets.filter_limit_block_all(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_limit_pass_all(capsys, snapshot, table_id): +def test_filter_limit_pass_all(capsys, table_id): filter_snippets.filter_limit_pass_all(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_modify_strip_value(capsys, snapshot, table_id): +def test_filter_modify_strip_value(capsys, table_id): filter_snippets.filter_modify_strip_value(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_modify_apply_label(capsys, snapshot, table_id): +def test_filter_modify_apply_label(capsys, table_id): filter_snippets.filter_modify_apply_label(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_composing_chain(capsys, snapshot, table_id): +def test_filter_composing_chain(capsys, table_id): filter_snippets.filter_composing_chain(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_composing_interleave(capsys, snapshot, table_id): +def test_filter_composing_interleave(capsys, table_id): filter_snippets.filter_composing_interleave(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_composing_condition(capsys, snapshot, table_id): +def test_filter_composing_condition(capsys, table_id): filter_snippets.filter_composing_condition(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert 
out == expected diff --git a/samples/snippets/filters/requirements-test.txt b/samples/snippets/filters/requirements-test.txt index 8075a1ec5..cb87efc0f 100644 --- a/samples/snippets/filters/requirements-test.txt +++ b/samples/snippets/filters/requirements-test.txt @@ -1 +1 @@ -pytest==8.0.0 +pytest==7.4.4 diff --git a/samples/snippets/filters/requirements.txt b/samples/snippets/filters/requirements.txt index ae10593d2..6dc985893 100644 --- a/samples/snippets/filters/requirements.txt +++ b/samples/snippets/filters/requirements.txt @@ -1,2 +1 @@ google-cloud-bigtable==2.22.0 -snapshottest==0.6.0 \ No newline at end of file diff --git a/samples/snippets/filters/snapshots/snap_filters_test.py b/samples/snippets/filters/snapshots/snap_filters_test.py index a0580f565..2331c93bc 100644 --- a/samples/snippets/filters/snapshots/snap_filters_test.py +++ b/samples/snippets/filters/snapshots/snap_filters_test.py @@ -1,13 +1,13 @@ # -*- coding: utf-8 -*- -# snapshottest: v1 - https://goo.gl/zC4yUc -# flake8: noqa +# this was previously implemented using the `snapshottest` package (https://goo.gl/zC4yUc), +# which is not compatible with Python 3.12. So we moved to a standard dictionary storing +# expected outputs for each test from __future__ import unicode_literals -from snapshottest import Snapshot -snapshots = Snapshot() +snapshots = {} -snapshots['test_filter_limit_row_regex 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_limit_row_regex'] = '''Reading data for phone#4c410523#20190501: Column Family cell_plan \tdata_plan_01gb: false @2019-05-01 00:00:00+00:00 \tdata_plan_01gb: true @2019-04-30 23:00:00+00:00 @@ -27,7 +27,7 @@ ''' -snapshots['test_filter_limit_cells_per_col 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_limit_cells_per_col'] = '''Reading data for phone#4c410523#20190501: Column Family cell_plan \tdata_plan_01gb: false @2019-05-01 00:00:00+00:00 \tdata_plan_01gb: true @2019-04-30 23:00:00+00:00 @@ -71,7 +71,7 @@ ''' -snapshots['test_filter_limit_cells_per_row 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_limit_cells_per_row'] = '''Reading data for phone#4c410523#20190501: Column Family cell_plan \tdata_plan_01gb: false @2019-05-01 00:00:00+00:00 \tdata_plan_01gb: true @2019-04-30 23:00:00+00:00 @@ -102,7 +102,7 @@ ''' -snapshots['test_filter_limit_cells_per_row_offset 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_limit_cells_per_row_offset'] = '''Reading data for phone#4c410523#20190501: Column Family cell_plan \tdata_plan_05gb: true @2019-05-01 00:00:00+00:00 Column Family stats_summary @@ -132,7 +132,7 @@ ''' -snapshots['test_filter_limit_col_family_regex 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_limit_col_family_regex'] = '''Reading data for phone#4c410523#20190501: Column Family stats_summary \tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 \tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 @@ -164,7 +164,7 @@ ''' -snapshots['test_filter_limit_col_qualifier_regex 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_limit_col_qualifier_regex'] = '''Reading data for phone#4c410523#20190501: Column Family stats_summary \tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 \tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 @@ -191,7 +191,7 @@ ''' -snapshots['test_filter_limit_col_range 1'] = '''Reading data for 
phone#4c410523#20190501: +snapshots['test_filter_limit_col_range'] = '''Reading data for phone#4c410523#20190501: Column Family cell_plan \tdata_plan_01gb: false @2019-05-01 00:00:00+00:00 \tdata_plan_01gb: true @2019-04-30 23:00:00+00:00 @@ -207,7 +207,7 @@ ''' -snapshots['test_filter_limit_value_range 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_limit_value_range'] = '''Reading data for phone#4c410523#20190501: Column Family stats_summary \tos_build: PQ2A.190405.003 @2019-05-01 00:00:00+00:00 @@ -217,7 +217,7 @@ ''' -snapshots['test_filter_limit_value_regex 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_limit_value_regex'] = '''Reading data for phone#4c410523#20190501: Column Family stats_summary \tos_build: PQ2A.190405.003 @2019-05-01 00:00:00+00:00 @@ -239,15 +239,15 @@ ''' -snapshots['test_filter_limit_timestamp_range 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_limit_timestamp_range'] = '''Reading data for phone#4c410523#20190501: Column Family cell_plan \tdata_plan_01gb: true @2019-04-30 23:00:00+00:00 ''' -snapshots['test_filter_limit_block_all 1'] = '' +snapshots['test_filter_limit_block_all'] = '' -snapshots['test_filter_limit_pass_all 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_limit_pass_all'] = '''Reading data for phone#4c410523#20190501: Column Family cell_plan \tdata_plan_01gb: false @2019-05-01 00:00:00+00:00 \tdata_plan_01gb: true @2019-04-30 23:00:00+00:00 @@ -291,7 +291,7 @@ ''' -snapshots['test_filter_modify_strip_value 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_modify_strip_value'] = '''Reading data for phone#4c410523#20190501: Column Family cell_plan \tdata_plan_01gb: @2019-05-01 00:00:00+00:00 \tdata_plan_01gb: @2019-04-30 23:00:00+00:00 @@ -335,7 +335,7 @@ ''' -snapshots['test_filter_modify_apply_label 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_modify_apply_label'] = '''Reading data for phone#4c410523#20190501: Column Family cell_plan \tdata_plan_01gb: false @2019-05-01 00:00:00+00:00 [labelled] \tdata_plan_01gb: true @2019-04-30 23:00:00+00:00 [labelled] @@ -379,7 +379,7 @@ ''' -snapshots['test_filter_composing_chain 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_composing_chain'] = '''Reading data for phone#4c410523#20190501: Column Family cell_plan \tdata_plan_01gb: false @2019-05-01 00:00:00+00:00 \tdata_plan_05gb: true @2019-05-01 00:00:00+00:00 @@ -402,7 +402,7 @@ ''' -snapshots['test_filter_composing_interleave 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_composing_interleave'] = '''Reading data for phone#4c410523#20190501: Column Family cell_plan \tdata_plan_01gb: true @2019-04-30 23:00:00+00:00 \tdata_plan_05gb: true @2019-05-01 00:00:00+00:00 @@ -435,7 +435,7 @@ ''' -snapshots['test_filter_composing_condition 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_composing_condition'] = '''Reading data for phone#4c410523#20190501: Column Family cell_plan \tdata_plan_01gb: false @2019-05-01 00:00:00+00:00 [filtered-out] \tdata_plan_01gb: true @2019-04-30 23:00:00+00:00 [filtered-out] diff --git a/samples/snippets/reads/__init__.py b/samples/snippets/reads/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/samples/snippets/reads/reads_test.py b/samples/snippets/reads/reads_test.py index 0b61e341f..da826d6fb 100644 --- a/samples/snippets/reads/reads_test.py +++ 
b/samples/snippets/reads/reads_test.py @@ -14,11 +14,13 @@ import datetime import os import uuid +import inspect from google.cloud import bigtable import pytest -import read_snippets +from .snapshots.snap_reads_test import snapshots +from . import read_snippets PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] @@ -72,50 +74,57 @@ def table_id(): table.delete() -def test_read_row(capsys, snapshot, table_id): +def test_read_row(capsys, table_id): read_snippets.read_row(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_read_row_partial(capsys, snapshot, table_id): +def test_read_row_partial(capsys, table_id): read_snippets.read_row_partial(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_read_rows(capsys, snapshot, table_id): +def test_read_rows(capsys, table_id): read_snippets.read_rows(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_read_row_range(capsys, snapshot, table_id): +def test_read_row_range(capsys, table_id): read_snippets.read_row_range(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_read_row_ranges(capsys, snapshot, table_id): +def test_read_row_ranges(capsys, table_id): read_snippets.read_row_ranges(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_read_prefix(capsys, snapshot, table_id): +def test_read_prefix(capsys, table_id): read_snippets.read_prefix(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_read_filter(capsys, snapshot, table_id): +def test_read_filter(capsys, table_id): read_snippets.read_filter(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected diff --git a/samples/snippets/reads/requirements-test.txt b/samples/snippets/reads/requirements-test.txt index 8075a1ec5..cb87efc0f 100644 --- a/samples/snippets/reads/requirements-test.txt +++ b/samples/snippets/reads/requirements-test.txt @@ -1 +1 @@ -pytest==8.0.0 +pytest==7.4.4 diff --git a/samples/snippets/reads/requirements.txt b/samples/snippets/reads/requirements.txt index ae10593d2..6dc985893 100644 --- a/samples/snippets/reads/requirements.txt +++ b/samples/snippets/reads/requirements.txt @@ -1,2 +1 @@ google-cloud-bigtable==2.22.0 -snapshottest==0.6.0 \ No newline at end of file diff --git a/samples/snippets/reads/snapshots/snap_reads_test.py b/samples/snippets/reads/snapshots/snap_reads_test.py index f45e98f2e..564a4df7e 100644 --- a/samples/snippets/reads/snapshots/snap_reads_test.py +++ b/samples/snippets/reads/snapshots/snap_reads_test.py @@ -1,19 +1,18 @@ # -*- coding: utf-8 -*- -# snapshottest: v1 - https://goo.gl/zC4yUc +# this was previously implemented using the `snapshottest` package (https://goo.gl/zC4yUc), +# which is not 
compatible with Python 3.12. So we moved to a standard dictionary storing +# expected outputs for each test from __future__ import unicode_literals -from snapshottest import Snapshot +snapshots = {} - -snapshots = Snapshot() - -snapshots['test_read_row_partial 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_read_row_partial'] = '''Reading data for phone#4c410523#20190501: Column Family stats_summary \tos_build: PQ2A.190405.003 @2019-05-01 00:00:00+00:00 ''' -snapshots['test_read_rows 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_read_rows'] = '''Reading data for phone#4c410523#20190501: Column Family stats_summary \tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 \tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 @@ -27,7 +26,7 @@ ''' -snapshots['test_read_row_range 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_read_row_range'] = '''Reading data for phone#4c410523#20190501: Column Family stats_summary \tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 \tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 @@ -47,7 +46,7 @@ ''' -snapshots['test_read_row_ranges 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_read_row_ranges'] = '''Reading data for phone#4c410523#20190501: Column Family stats_summary \tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 \tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 @@ -79,7 +78,7 @@ ''' -snapshots['test_read_prefix 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_read_prefix'] = '''Reading data for phone#4c410523#20190501: Column Family stats_summary \tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 \tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 @@ -111,7 +110,7 @@ ''' -snapshots['test_read_filter 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_read_filter'] = '''Reading data for phone#4c410523#20190501: Column Family stats_summary \tos_build: PQ2A.190405.003 @2019-05-01 00:00:00+00:00 @@ -133,7 +132,7 @@ ''' -snapshots['test_read_row 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_read_row'] = '''Reading data for phone#4c410523#20190501: Column Family stats_summary \tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 \tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 diff --git a/samples/snippets/writes/requirements-test.txt b/samples/snippets/writes/requirements-test.txt index aaa563abc..43b02e724 100644 --- a/samples/snippets/writes/requirements-test.txt +++ b/samples/snippets/writes/requirements-test.txt @@ -1,2 +1,2 @@ backoff==2.2.1 -pytest==8.0.0 +pytest==7.4.4 diff --git a/samples/snippets/writes/write_batch.py b/samples/snippets/writes/write_batch.py index fd5117242..8ad4b07a5 100644 --- a/samples/snippets/writes/write_batch.py +++ b/samples/snippets/writes/write_batch.py @@ -16,6 +16,7 @@ import datetime from google.cloud import bigtable +from google.cloud.bigtable.batcher import MutationsBatcher def write_batch(project_id, instance_id, table_id): @@ -23,23 +24,21 @@ def write_batch(project_id, instance_id, table_id): instance = client.instance(instance_id) table = instance.table(table_id) - timestamp = datetime.datetime.utcnow() - column_family_id = "stats_summary" + with MutationsBatcher(table=table) as batcher: + timestamp = 
datetime.datetime.utcnow() + column_family_id = "stats_summary" - rows = [ - table.direct_row("tablet#a0b81f74#20190501"), - table.direct_row("tablet#a0b81f74#20190502"), - ] + rows = [ + table.direct_row("tablet#a0b81f74#20190501"), + table.direct_row("tablet#a0b81f74#20190502"), + ] - rows[0].set_cell(column_family_id, "connected_wifi", 1, timestamp) - rows[0].set_cell(column_family_id, "os_build", "12155.0.0-rc1", timestamp) - rows[1].set_cell(column_family_id, "connected_wifi", 1, timestamp) - rows[1].set_cell(column_family_id, "os_build", "12145.0.0-rc6", timestamp) + rows[0].set_cell(column_family_id, "connected_wifi", 1, timestamp) + rows[0].set_cell(column_family_id, "os_build", "12155.0.0-rc1", timestamp) + rows[1].set_cell(column_family_id, "connected_wifi", 1, timestamp) + rows[1].set_cell(column_family_id, "os_build", "12145.0.0-rc6", timestamp) - response = table.mutate_rows(rows) - for i, status in enumerate(response): - if status.code != 0: - print("Error writing row: {}".format(status.message)) + batcher.mutate_rows(rows) print("Successfully wrote 2 rows.") diff --git a/samples/tableadmin/requirements-test.txt b/samples/tableadmin/requirements-test.txt index b4d30f505..aa143f59d 100644 --- a/samples/tableadmin/requirements-test.txt +++ b/samples/tableadmin/requirements-test.txt @@ -1,2 +1,2 @@ -pytest==8.0.0 +pytest==7.4.4 google-cloud-testutils==1.4.0 diff --git a/tests/unit/gapic/bigtable_admin_v2/test_bigtable_instance_admin.py b/tests/unit/gapic/bigtable_admin_v2/test_bigtable_instance_admin.py index 7a24cab54..10e9d101b 100644 --- a/tests/unit/gapic/bigtable_admin_v2/test_bigtable_instance_admin.py +++ b/tests/unit/gapic/bigtable_admin_v2/test_bigtable_instance_admin.py @@ -6628,7 +6628,6 @@ def test_create_instance_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -6922,7 +6921,6 @@ def test_get_instance_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -7193,7 +7191,6 @@ def test_list_instances_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -7474,7 +7471,6 @@ def test_update_instance_rest_required_fields(request_type=instance.Instance): jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -7757,7 +7753,6 @@ def test_partial_update_instance_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -8029,7 +8024,6 @@ def test_delete_instance_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -8368,7 +8362,6 @@ def test_create_cluster_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -8671,7 +8664,6 @@ def test_get_cluster_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -8943,7 +8935,6 @@ def test_list_clusters_rest_required_fields( jsonified_request = 
json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -9424,7 +9415,6 @@ def test_partial_update_cluster_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -9701,7 +9691,6 @@ def test_delete_cluster_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -10047,7 +10036,6 @@ def test_create_app_profile_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -10360,7 +10348,6 @@ def test_get_app_profile_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -10633,7 +10620,6 @@ def test_list_app_profiles_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -11061,7 +11047,6 @@ def test_update_app_profile_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -11353,7 +11338,6 @@ def test_delete_app_profile_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -11636,7 +11620,6 @@ def test_get_iam_policy_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -11900,7 +11883,6 @@ def test_set_iam_policy_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -12171,7 +12153,6 @@ def test_test_iam_permissions_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -12450,7 +12431,6 @@ def test_list_hot_tablets_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) diff --git a/tests/unit/gapic/bigtable_admin_v2/test_bigtable_table_admin.py b/tests/unit/gapic/bigtable_admin_v2/test_bigtable_table_admin.py index b52ad0606..67f02f9ce 100644 --- a/tests/unit/gapic/bigtable_admin_v2/test_bigtable_table_admin.py +++ b/tests/unit/gapic/bigtable_admin_v2/test_bigtable_table_admin.py @@ -7740,7 +7740,6 @@ def test_create_table_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -8022,7 +8021,6 @@ def test_create_table_from_snapshot_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -8311,7 +8309,6 @@ def test_list_tables_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -8659,7 +8656,6 @@ def test_get_table_rest_required_fields( 
jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -9010,7 +9006,6 @@ def test_update_table_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -9286,7 +9281,6 @@ def test_delete_table_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -9538,7 +9532,6 @@ def test_undelete_table_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -9811,7 +9804,6 @@ def test_modify_column_families_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -10095,7 +10087,6 @@ def test_drop_row_range_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -10301,7 +10292,6 @@ def test_generate_consistency_token_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -10581,7 +10571,6 @@ def test_check_consistency_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -10867,7 +10856,6 @@ def test_snapshot_table_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -11165,7 +11153,6 @@ def test_get_snapshot_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -11438,7 +11425,6 @@ def test_list_snapshots_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -11782,7 +11768,6 @@ def test_delete_snapshot_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -12131,7 +12116,6 @@ def test_create_backup_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -12439,7 +12423,6 @@ def test_get_backup_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -12814,7 +12797,6 @@ def test_update_backup_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -13097,7 +13079,6 @@ def test_delete_backup_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -13359,7 +13340,6 @@ def test_list_backups_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, 
use_integers_for_enums=False, ) ) @@ -13706,7 +13686,6 @@ def test_restore_table_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -13928,7 +13907,6 @@ def test_copy_backup_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -14221,7 +14199,6 @@ def test_get_iam_policy_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -14487,7 +14464,6 @@ def test_set_iam_policy_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -14760,7 +14736,6 @@ def test_test_iam_permissions_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) diff --git a/tests/unit/gapic/bigtable_v2/test_bigtable.py b/tests/unit/gapic/bigtable_v2/test_bigtable.py index ab05af426..105f9e49e 100644 --- a/tests/unit/gapic/bigtable_v2/test_bigtable.py +++ b/tests/unit/gapic/bigtable_v2/test_bigtable.py @@ -3428,7 +3428,6 @@ def test_read_rows_rest_required_fields(request_type=bigtable.ReadRowsRequest): jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -3716,7 +3715,6 @@ def test_sample_row_keys_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -3992,7 +3990,6 @@ def test_mutate_row_rest_required_fields(request_type=bigtable.MutateRowRequest) jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -4290,7 +4287,6 @@ def test_mutate_rows_rest_required_fields(request_type=bigtable.MutateRowsReques jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -4580,7 +4576,6 @@ def test_check_and_mutate_row_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -4904,7 +4899,6 @@ def test_ping_and_warm_rest_required_fields(request_type=bigtable.PingAndWarmReq jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -5170,7 +5164,6 @@ def test_read_modify_write_row_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -5466,7 +5459,6 @@ def test_generate_initial_change_stream_partitions_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) @@ -5770,7 +5762,6 @@ def test_read_change_stream_rest_required_fields( jsonified_request = json.loads( json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False, ) ) diff --git a/tests/unit/v2_client/test_client.py b/tests/unit/v2_client/test_client.py index 
5944c58a3..b6eb6ac96 100644 --- a/tests/unit/v2_client/test_client.py +++ b/tests/unit/v2_client/test_client.py @@ -176,7 +176,7 @@ def test_client_constructor_w_emulator_host(): emulator_host = "localhost:8081" with mock.patch("os.environ", {BIGTABLE_EMULATOR: emulator_host}): - with mock.patch("grpc.secure_channel") as factory: + with mock.patch("grpc.insecure_channel") as factory: client = _make_client() # don't test local_composite_credentials # client._local_composite_credentials = lambda: credentials @@ -188,7 +188,6 @@ def test_client_constructor_w_emulator_host(): assert client.project == _DEFAULT_BIGTABLE_EMULATOR_CLIENT factory.assert_called_once_with( emulator_host, - mock.ANY, # test of creds wrapping in '_emulator_host' below options=_GRPC_CHANNEL_OPTIONS, ) @@ -199,7 +198,7 @@ def test_client_constructor_w_emulator_host_w_project(): emulator_host = "localhost:8081" with mock.patch("os.environ", {BIGTABLE_EMULATOR: emulator_host}): - with mock.patch("grpc.secure_channel") as factory: + with mock.patch("grpc.insecure_channel") as factory: client = _make_client(project=PROJECT) # channels are formed when needed, so access a client # create a gapic channel @@ -209,7 +208,6 @@ def test_client_constructor_w_emulator_host_w_project(): assert client.project == PROJECT factory.assert_called_once_with( emulator_host, - mock.ANY, # test of creds wrapping in '_emulator_host' below options=_GRPC_CHANNEL_OPTIONS, ) @@ -222,7 +220,7 @@ def test_client_constructor_w_emulator_host_w_credentials(): emulator_host = "localhost:8081" credentials = _make_credentials() with mock.patch("os.environ", {BIGTABLE_EMULATOR: emulator_host}): - with mock.patch("grpc.secure_channel") as factory: + with mock.patch("grpc.insecure_channel") as factory: client = _make_client(credentials=credentials) # channels are formed when needed, so access a client # create a gapic channel @@ -232,7 +230,6 @@ def test_client_constructor_w_emulator_host_w_credentials(): assert client.project == _DEFAULT_BIGTABLE_EMULATOR_CLIENT factory.assert_called_once_with( emulator_host, - mock.ANY, # test of creds wrapping in '_emulator_host' below options=_GRPC_CHANNEL_OPTIONS, ) @@ -271,15 +268,13 @@ def test_client__emulator_channel_w_sync(): project=PROJECT, credentials=_make_credentials(), read_only=True ) client._emulator_host = emulator_host - lcc = client._local_composite_credentials = mock.Mock(spec=[]) - with mock.patch("grpc.secure_channel") as patched: + with mock.patch("grpc.insecure_channel") as patched: channel = client._emulator_channel(transport, options) assert channel is patched.return_value patched.assert_called_once_with( emulator_host, - lcc.return_value, options=options, ) @@ -293,56 +288,17 @@ def test_client__emulator_channel_w_async(): project=PROJECT, credentials=_make_credentials(), read_only=True ) client._emulator_host = emulator_host - lcc = client._local_composite_credentials = mock.Mock(spec=[]) - with mock.patch("grpc.aio.secure_channel") as patched: + with mock.patch("grpc.aio.insecure_channel") as patched: channel = client._emulator_channel(transport, options) assert channel is patched.return_value patched.assert_called_once_with( emulator_host, - lcc.return_value, options=options, ) -def test_client__local_composite_credentials(): - client = _make_client( - project=PROJECT, credentials=_make_credentials(), read_only=True - ) - - wsir_patch = mock.patch("google.auth.credentials.with_scopes_if_required") - request_patch = mock.patch("google.auth.transport.requests.Request") - amp_patch = 
mock.patch("google.auth.transport.grpc.AuthMetadataPlugin") - grpc_patches = mock.patch.multiple( - "grpc", - metadata_call_credentials=mock.DEFAULT, - local_channel_credentials=mock.DEFAULT, - composite_channel_credentials=mock.DEFAULT, - ) - with wsir_patch as wsir_patched: - with request_patch as request_patched: - with amp_patch as amp_patched: - with grpc_patches as grpc_patched: - credentials = client._local_composite_credentials() - - grpc_mcc = grpc_patched["metadata_call_credentials"] - grpc_lcc = grpc_patched["local_channel_credentials"] - grpc_ccc = grpc_patched["composite_channel_credentials"] - - assert credentials is grpc_ccc.return_value - - wsir_patched.assert_called_once_with(client._credentials, None) - request_patched.assert_called_once_with() - amp_patched.assert_called_once_with( - wsir_patched.return_value, - request_patched.return_value, - ) - grpc_mcc.assert_called_once_with(amp_patched.return_value) - grpc_lcc.assert_called_once_with() - grpc_ccc.assert_called_once_with(grpc_lcc.return_value, grpc_mcc.return_value) - - def _create_gapic_client_channel_helper(endpoint=None, emulator_host=None): from google.cloud.bigtable.client import _GRPC_CHANNEL_OPTIONS