From 2660dbd4821a89a1e20e3e1541504a409f1979aa Mon Sep 17 00:00:00 2001
From: Lingqing Gan
Date: Mon, 11 Mar 2024 10:04:17 -0700
Subject: [PATCH 01/15] fix: correct type checking (#1848)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Correct the way we check whether `self._done_timeout` is an instance of
`object` class or not.

Fixes #1838 🦕
---
 google/cloud/bigquery/job/query.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/google/cloud/bigquery/job/query.py b/google/cloud/bigquery/job/query.py
index e45a46894..83d2751ce 100644
--- a/google/cloud/bigquery/job/query.py
+++ b/google/cloud/bigquery/job/query.py
@@ -1409,9 +1409,9 @@ def _reload_query_results(
         # Python_API_core, as part of a major rewrite of the deadline, timeout,
         # retry process sets the timeout value as a Python object().
         # Our system does not natively handle that and instead expects
-        # either none or a numeric value. If passed a Python object, convert to
+        # either None or a numeric value. If passed a Python object, convert to
         # None.
-        if isinstance(self._done_timeout, object):  # pragma: NO COVER
+        if type(self._done_timeout) is object:  # pragma: NO COVER
             self._done_timeout = None

         if self._done_timeout is not None:  # pragma: NO COVER

From 04f23780b66c4487333dbc592410a6930c9308ae Mon Sep 17 00:00:00 2001
From: Mend Renovate
Date: Tue, 12 Mar 2024 13:33:12 +0100
Subject: [PATCH 02/15] chore(deps): update all dependencies (#1849)

* chore(deps): update all dependencies
* Update samples/desktopapp/requirements-test.txt
* Update samples/geography/requirements-test.txt
* Update samples/magics/requirements-test.txt
* Update samples/magics/requirements.txt
* Update samples/notebooks/requirements-test.txt
* Update samples/notebooks/requirements.txt
* Update samples/snippets/requirements-test.txt

---------

Co-authored-by: Chalmer Lowe
---
 samples/desktopapp/requirements-test.txt | 2 +-
 samples/desktopapp/requirements.txt | 2 +-
 samples/geography/requirements-test.txt | 2 +-
 samples/geography/requirements.txt | 2 +-
 samples/magics/requirements-test.txt | 2 +-
 samples/magics/requirements.txt | 4 ++--
 samples/notebooks/requirements-test.txt | 2 +-
 samples/notebooks/requirements.txt | 4 ++--
 samples/snippets/requirements-test.txt | 2 +-
 samples/snippets/requirements.txt | 2 +-
 10 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/samples/desktopapp/requirements-test.txt b/samples/desktopapp/requirements-test.txt
index 99d27b06a..9142d4905 100644
--- a/samples/desktopapp/requirements-test.txt
+++ b/samples/desktopapp/requirements-test.txt
@@ -1,4 +1,4 @@
 google-cloud-testutils==1.4.0
-pytest==7.4.4; python_version == '3.7'
+pytest===7.4.4; python_version == '3.7'
 pytest==8.1.1; python_version >= '3.8'
 mock==5.1.0
diff --git a/samples/desktopapp/requirements.txt b/samples/desktopapp/requirements.txt
index 78074bbca..8561934dc 100644
--- a/samples/desktopapp/requirements.txt
+++ b/samples/desktopapp/requirements.txt
@@ -1,2 +1,2 @@
-google-cloud-bigquery==3.18.0
+google-cloud-bigquery==3.19.0
 google-auth-oauthlib==1.2.0
diff --git a/samples/geography/requirements-test.txt b/samples/geography/requirements-test.txt
index a91fa2d55..f052969d3 100644
--- a/samples/geography/requirements-test.txt
+++ b/samples/geography/requirements-test.txt
@@ -1,3 +1,3 @@
-pytest==7.4.4; python_version == '3.7'
+pytest===7.4.4; python_version == '3.7'
 pytest==8.1.1; python_version >= '3.8'
 mock==5.1.0
diff --git a/samples/geography/requirements.txt b/samples/geography/requirements.txt
index c85bf06d0..b474e252c 100644
--- a/samples/geography/requirements.txt
+++ b/samples/geography/requirements.txt
@@ -15,7 +15,7 @@ geopandas===0.13.2; python_version == '3.8'
 geopandas==0.14.3; python_version >= '3.9'
 google-api-core==2.17.1
 google-auth==2.28.2
-google-cloud-bigquery==3.18.0
+google-cloud-bigquery==3.19.0
 google-cloud-bigquery-storage==2.24.0
 google-cloud-core==2.4.1
 google-crc32c==1.5.0
diff --git a/samples/magics/requirements-test.txt b/samples/magics/requirements-test.txt
index 99d27b06a..9142d4905 100644
--- a/samples/magics/requirements-test.txt
+++ b/samples/magics/requirements-test.txt
@@ -1,4 +1,4 @@
 google-cloud-testutils==1.4.0
-pytest==7.4.4; python_version == '3.7'
+pytest===7.4.4; python_version == '3.7'
 pytest==8.1.1; python_version >= '3.8'
 mock==5.1.0
diff --git a/samples/magics/requirements.txt b/samples/magics/requirements.txt
index ea987358f..9179db067 100644
--- a/samples/magics/requirements.txt
+++ b/samples/magics/requirements.txt
@@ -1,9 +1,9 @@
 db-dtypes==1.2.0
-google.cloud.bigquery==3.18.0
+google.cloud.bigquery==3.19.0
 google-cloud-bigquery-storage==2.24.0
 ipython===7.31.1; python_version == '3.7'
 ipython===8.0.1; python_version == '3.8'
-ipython==8.18.1; python_version >= '3.9'
+ipython===8.18.1; python_version >= '3.9'
 pandas===1.3.5; python_version == '3.7'
 pandas===2.0.3; python_version == '3.8'
 pandas==2.2.1; python_version >= '3.9'
diff --git a/samples/notebooks/requirements-test.txt b/samples/notebooks/requirements-test.txt
index 99d27b06a..9142d4905 100644
--- a/samples/notebooks/requirements-test.txt
+++ b/samples/notebooks/requirements-test.txt
@@ -1,4 +1,4 @@
 google-cloud-testutils==1.4.0
-pytest==7.4.4; python_version == '3.7'
+pytest===7.4.4; python_version == '3.7'
 pytest==8.1.1; python_version >= '3.8'
 mock==5.1.0
diff --git a/samples/notebooks/requirements.txt b/samples/notebooks/requirements.txt
index 5ce95818e..8f2e93620 100644
--- a/samples/notebooks/requirements.txt
+++ b/samples/notebooks/requirements.txt
@@ -1,9 +1,9 @@
 db-dtypes==1.2.0
-google-cloud-bigquery==3.18.0
+google-cloud-bigquery==3.19.0
 google-cloud-bigquery-storage==2.24.0
 ipython===7.31.1; python_version == '3.7'
 ipython===8.0.1; python_version == '3.8'
-ipython==8.18.1; python_version >= '3.9'
+ipython===8.18.1; python_version >= '3.9'
 matplotlib===3.5.3; python_version == '3.7'
 matplotlib===3.7.4; python_version == '3.8'
 matplotlib==3.8.3; python_version >= '3.9'
diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt
index 99d27b06a..9142d4905 100644
--- a/samples/snippets/requirements-test.txt
+++ b/samples/snippets/requirements-test.txt
@@ -1,4 +1,4 @@
 google-cloud-testutils==1.4.0
-pytest==7.4.4; python_version == '3.7'
+pytest===7.4.4; python_version == '3.7'
 pytest==8.1.1; python_version >= '3.8'
 mock==5.1.0
diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt
index fc0a2ef36..b3347499f 100644
--- a/samples/snippets/requirements.txt
+++ b/samples/snippets/requirements.txt
@@ -1 +1 @@
-google-cloud-bigquery==3.18.0
\ No newline at end of file
+google-cloud-bigquery==3.19.0
\ No newline at end of file

From dc932415a0ee96dc5ae3601d0e757c70712291a0 Mon Sep 17 00:00:00 2001
From: shollyman
Date: Tue, 12 Mar 2024 09:01:49 -0700
Subject: [PATCH 03/15] testing: unhook prerelease-deps-3.12 from presubmit (#1851)

Testing for prerelease-deps is done within continuous.
Co-authored-by: Chalmer Lowe --- .kokoro/presubmit/prerelease-deps-3.12.cfg | 7 ------- 1 file changed, 7 deletions(-) delete mode 100644 .kokoro/presubmit/prerelease-deps-3.12.cfg diff --git a/.kokoro/presubmit/prerelease-deps-3.12.cfg b/.kokoro/presubmit/prerelease-deps-3.12.cfg deleted file mode 100644 index ece962a17..000000000 --- a/.kokoro/presubmit/prerelease-deps-3.12.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Only run this nox session. -env_vars: { - key: "NOX_SESSION" - value: "prerelease_deps-3.12" -} From 71393e0a40a64911700d67b5bf527ec44e35e360 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 15 Mar 2024 10:32:03 -0700 Subject: [PATCH 04/15] chore(python): add requirements for docs build (#1858) Source-Link: https://github.com/googleapis/synthtool/commit/85c23b6bc4352c1b0674848eaeb4e48645aeda6b Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:3741fd1f5f5150378563c76afa06bcc12777b5fe54c5ee01115218f83872134f Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 +-- .kokoro/build.sh | 7 ----- .kokoro/docker/docs/Dockerfile | 4 +++ .kokoro/docker/docs/requirements.in | 1 + .kokoro/docker/docs/requirements.txt | 38 ++++++++++++++++++++++++++++ 5 files changed, 45 insertions(+), 9 deletions(-) create mode 100644 .kokoro/docker/docs/requirements.in create mode 100644 .kokoro/docker/docs/requirements.txt diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index e4e943e02..5d9542b1c 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:98f3afd11308259de6e828e37376d18867fd321aba07826e29e4f8d9cab56bad -# created: 2024-02-27T15:56:18.442440378Z + digest: sha256:3741fd1f5f5150378563c76afa06bcc12777b5fe54c5ee01115218f83872134f +# created: 2024-03-15T16:26:15.743347415Z diff --git a/.kokoro/build.sh b/.kokoro/build.sh index 0cb0d0dd0..f38bda804 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -33,13 +33,6 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json # Setup project id. export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") -# Remove old nox -python3 -m pip uninstall --yes --quiet nox-automation - -# Install nox -python3 -m pip install --upgrade --quiet nox -python3 -m nox --version - # If this is a continuous build, send the test log to the FlakyBot. # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. 
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile index 8e39a2cc4..bdaf39fe2 100644 --- a/.kokoro/docker/docs/Dockerfile +++ b/.kokoro/docker/docs/Dockerfile @@ -80,4 +80,8 @@ RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ # Test pip RUN python3 -m pip +# Install build requirements +COPY requirements.txt /requirements.txt +RUN python3 -m pip install --require-hashes -r requirements.txt + CMD ["python3.8"] diff --git a/.kokoro/docker/docs/requirements.in b/.kokoro/docker/docs/requirements.in new file mode 100644 index 000000000..816817c67 --- /dev/null +++ b/.kokoro/docker/docs/requirements.in @@ -0,0 +1 @@ +nox diff --git a/.kokoro/docker/docs/requirements.txt b/.kokoro/docker/docs/requirements.txt new file mode 100644 index 000000000..0e5d70f20 --- /dev/null +++ b/.kokoro/docker/docs/requirements.txt @@ -0,0 +1,38 @@ +# +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: +# +# pip-compile --allow-unsafe --generate-hashes requirements.in +# +argcomplete==3.2.3 \ + --hash=sha256:bf7900329262e481be5a15f56f19736b376df6f82ed27576fa893652c5de6c23 \ + --hash=sha256:c12355e0494c76a2a7b73e3a59b09024ca0ba1e279fb9ed6c1b82d5b74b6a70c + # via nox +colorlog==6.8.2 \ + --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ + --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 + # via nox +distlib==0.3.8 \ + --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ + --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 + # via virtualenv +filelock==3.13.1 \ + --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ + --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c + # via virtualenv +nox==2024.3.2 \ + --hash=sha256:e53514173ac0b98dd47585096a55572fe504fecede58ced708979184d05440be \ + --hash=sha256:f521ae08a15adbf5e11f16cb34e8d0e6ea521e0b92868f684e91677deb974553 + # via -r requirements.in +packaging==24.0 \ + --hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 \ + --hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9 + # via nox +platformdirs==4.2.0 \ + --hash=sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068 \ + --hash=sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768 + # via virtualenv +virtualenv==20.25.1 \ + --hash=sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a \ + --hash=sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197 + # via nox From c1b4dfff71f0c64078cb104479eaabf94b20d176 Mon Sep 17 00:00:00 2001 From: shollyman Date: Fri, 15 Mar 2024 15:07:31 -0700 Subject: [PATCH 05/15] testing: reduce python versions in unit testing (#1857) * testing: evaluate reducing versions under unit test * align unit and system versions under test * opt 3.7 back in * widen range of versions --------- Co-authored-by: Lingqing Gan --- noxfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noxfile.py b/noxfile.py index c31d098b8..9445f4f74 100644 --- a/noxfile.py +++ b/noxfile.py @@ -38,7 +38,7 @@ DEFAULT_PYTHON_VERSION = "3.8" SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.11", "3.12"] -UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.12"] CURRENT_DIRECTORY = 
pathlib.Path(__file__).parent.absolute() # 'docfx' is excluded since it only needs to run in 'docs-presubmit' From bb59f734cfcff4912b95dde1f79a48d5d8978bfe Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 15 Mar 2024 16:12:39 -0700 Subject: [PATCH 06/15] chore(python): update dependencies in /.kokoro (#1859) Source-Link: https://github.com/googleapis/synthtool/commit/db94845da69ccdfefd7ce55c84e6cfa74829747e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:a8a80fc6456e433df53fc2a0d72ca0345db0ddefb409f1b75b118dfd1babd952 Co-authored-by: Owl Bot Co-authored-by: Lingqing Gan --- .github/.OwlBot.lock.yaml | 4 +- .kokoro/requirements.in | 3 +- .kokoro/requirements.txt | 114 +++++++++++++++++--------------------- 3 files changed, 56 insertions(+), 65 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 5d9542b1c..dc9c56e9d 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:3741fd1f5f5150378563c76afa06bcc12777b5fe54c5ee01115218f83872134f -# created: 2024-03-15T16:26:15.743347415Z + digest: sha256:a8a80fc6456e433df53fc2a0d72ca0345db0ddefb409f1b75b118dfd1babd952 +# created: 2024-03-15T16:25:47.905264637Z \ No newline at end of file diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in index ec867d9fd..fff4d9ce0 100644 --- a/.kokoro/requirements.in +++ b/.kokoro/requirements.in @@ -1,5 +1,5 @@ gcp-docuploader -gcp-releasetool>=1.10.5 # required for compatibility with cryptography>=39.x +gcp-releasetool>=2 # required for compatibility with cryptography>=42.x importlib-metadata typing-extensions twine @@ -8,3 +8,4 @@ setuptools nox>=2022.11.21 # required to remove dependency on py charset-normalizer<3 click<8.1.0 +cryptography>=42.0.5 diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index bda8e38c4..dd61f5f32 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -93,40 +93,41 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==42.0.4 \ - --hash=sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b \ - --hash=sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce \ - --hash=sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88 \ - --hash=sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7 \ - --hash=sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20 \ - --hash=sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9 \ - --hash=sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff \ - --hash=sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1 \ - --hash=sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764 \ - --hash=sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b \ - --hash=sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298 \ - --hash=sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1 \ - --hash=sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824 \ - --hash=sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257 \ - --hash=sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a \ - --hash=sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129 \ - 
--hash=sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb \ - --hash=sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929 \ - --hash=sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854 \ - --hash=sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52 \ - --hash=sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923 \ - --hash=sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885 \ - --hash=sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0 \ - --hash=sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd \ - --hash=sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2 \ - --hash=sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18 \ - --hash=sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b \ - --hash=sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992 \ - --hash=sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74 \ - --hash=sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660 \ - --hash=sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925 \ - --hash=sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449 +cryptography==42.0.5 \ + --hash=sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee \ + --hash=sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576 \ + --hash=sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d \ + --hash=sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30 \ + --hash=sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413 \ + --hash=sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb \ + --hash=sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da \ + --hash=sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4 \ + --hash=sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd \ + --hash=sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc \ + --hash=sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8 \ + --hash=sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1 \ + --hash=sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc \ + --hash=sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e \ + --hash=sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8 \ + --hash=sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940 \ + --hash=sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400 \ + --hash=sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7 \ + --hash=sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16 \ + --hash=sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278 \ + --hash=sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74 \ + --hash=sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec \ + --hash=sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1 \ + --hash=sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2 \ + --hash=sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c \ + --hash=sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922 \ + 
--hash=sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a \ + --hash=sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6 \ + --hash=sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1 \ + --hash=sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e \ + --hash=sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac \ + --hash=sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7 # via + # -r requirements.in # gcp-releasetool # secretstorage distlib==0.3.7 \ @@ -145,9 +146,9 @@ gcp-docuploader==0.6.5 \ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==1.16.0 \ - --hash=sha256:27bf19d2e87aaa884096ff941aa3c592c482be3d6a2bfe6f06afafa6af2353e3 \ - --hash=sha256:a316b197a543fd036209d0caba7a8eb4d236d8e65381c80cbc6d7efaa7606d63 +gcp-releasetool==2.0.0 \ + --hash=sha256:3d73480b50ba243f22d7c7ec08b115a30e1c7817c4899781840c26f9c55b8277 \ + --hash=sha256:7aa9fd935ec61e581eb8458ad00823786d91756c25e492f372b2b30962f3c28f # via -r requirements.in google-api-core==2.12.0 \ --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ @@ -392,29 +393,18 @@ platformdirs==3.11.0 \ --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e # via virtualenv -protobuf==3.20.3 \ - --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ - --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ - --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \ - --hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \ - --hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \ - --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \ - --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \ - --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \ - --hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \ - --hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \ - --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \ - --hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \ - --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \ - --hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \ - --hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \ - --hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \ - --hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \ - --hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \ - --hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \ - --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ - --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ - --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee +protobuf==4.25.3 \ + --hash=sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4 \ + 
--hash=sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8 \ + --hash=sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c \ + --hash=sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d \ + --hash=sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4 \ + --hash=sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa \ + --hash=sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c \ + --hash=sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019 \ + --hash=sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9 \ + --hash=sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c \ + --hash=sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2 # via # gcp-docuploader # gcp-releasetool @@ -518,7 +508,7 @@ zipp==3.17.0 \ # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==68.2.2 \ - --hash=sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87 \ - --hash=sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a +setuptools==69.2.0 \ + --hash=sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e \ + --hash=sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c # via -r requirements.in From 38b8e5390f373b45e7cc7611d67e3fd7db8ec5e8 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 19 Mar 2024 00:56:11 +0100 Subject: [PATCH 07/15] chore(deps): update dependency pyarrow to v15.0.2 (#1861) --- samples/geography/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/geography/requirements.txt b/samples/geography/requirements.txt index b474e252c..c7a793358 100644 --- a/samples/geography/requirements.txt +++ b/samples/geography/requirements.txt @@ -31,7 +31,7 @@ pandas===2.0.3; python_version == '3.8' pandas==2.2.1; python_version >= '3.9' proto-plus==1.23.0 pyarrow==12.0.1; python_version == '3.7' -pyarrow==15.0.1; python_version >= '3.8' +pyarrow==15.0.2; python_version >= '3.8' pyasn1==0.5.1 pyasn1-modules==0.3.0 pycparser==2.21 From 0ac6e9bf186945832f5dcdf5a4d95667b4da223e Mon Sep 17 00:00:00 2001 From: Chalmer Lowe Date: Tue, 19 Mar 2024 09:51:49 -0400 Subject: [PATCH 08/15] fix: update error logging when converting to pyarrow column fails (#1836) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: update error logging when converting to pyarrow column fails * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * resolve merge conflict * resolve missing dependency * more tweaks to constraints and requirements re pyarrow * even more tweaks to constraints and requirements re pyarrow * a few more tweaks to constraints and requirements re pyarrow * resolves issue of pyarrow not installing * fix linting issue * update linting and conditionals * update linting and mypy comments * quick tags on several coverage issues related to imports * adds pragma to exception * updates test suite with new test and makes msg explicit * temporarily adding timing code * additional timing test mods * add pragmas to account for several tests * cleaned up some test code * cleaned up some test code * Update a test to include column datatype * update to pytest.raises command * Update tests/unit/test__pandas_helpers.py * 🦉 Updates from OwlBot post-processor See 
https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * removed unused variable 'e' --------- Co-authored-by: Owl Bot --- google/cloud/bigquery/_pandas_helpers.py | 20 +++++++++++++------- google/cloud/bigquery/_pyarrow_helpers.py | 2 +- noxfile.py | 15 +++++++++++---- samples/desktopapp/requirements-test.txt | 1 + samples/snippets/requirements-test.txt | 1 + testing/constraints-3.11.txt | 1 + testing/constraints-3.12.txt | 1 + testing/constraints-3.7.txt | 2 +- tests/unit/test__pandas_helpers.py | 17 +++++++++++++++-- tests/unit/test_table.py | 4 ++-- 10 files changed, 47 insertions(+), 17 deletions(-) diff --git a/google/cloud/bigquery/_pandas_helpers.py b/google/cloud/bigquery/_pandas_helpers.py index e97dda7e5..9f8dcfde4 100644 --- a/google/cloud/bigquery/_pandas_helpers.py +++ b/google/cloud/bigquery/_pandas_helpers.py @@ -49,10 +49,11 @@ db_dtypes_import_exception = exc date_dtype_name = time_dtype_name = "" # Use '' rather than None because pytype -pyarrow = _versions_helpers.PYARROW_VERSIONS.try_import() +pyarrow = _versions_helpers.PYARROW_VERSIONS.try_import(raise_if_error=True) +from pyarrow import ArrowTypeError # type: ignore # noqa: E402 _BIGNUMERIC_SUPPORT = False -if pyarrow is not None: +if pyarrow is not None: # pragma: NO COVER _BIGNUMERIC_SUPPORT = True try: @@ -302,11 +303,16 @@ def bq_to_arrow_array(series, bq_field): field_type_upper = bq_field.field_type.upper() if bq_field.field_type else "" - if bq_field.mode.upper() == "REPEATED": - return pyarrow.ListArray.from_pandas(series, type=arrow_type) - if field_type_upper in schema._STRUCT_TYPES: - return pyarrow.StructArray.from_pandas(series, type=arrow_type) - return pyarrow.Array.from_pandas(series, type=arrow_type) + try: + if bq_field.mode.upper() == "REPEATED": + return pyarrow.ListArray.from_pandas(series, type=arrow_type) + if field_type_upper in schema._STRUCT_TYPES: + return pyarrow.StructArray.from_pandas(series, type=arrow_type) + return pyarrow.Array.from_pandas(series, type=arrow_type) + except ArrowTypeError: # pragma: NO COVER + msg = f"""Error converting Pandas column with name: "{series.name}" and datatype: "{series.dtype}" to an appropriate pyarrow datatype: Array, ListArray, or StructArray""" + _LOGGER.error(msg) + raise ArrowTypeError(msg) def get_column_or_index(dataframe, name): diff --git a/google/cloud/bigquery/_pyarrow_helpers.py b/google/cloud/bigquery/_pyarrow_helpers.py index 946743eaf..06509cc93 100644 --- a/google/cloud/bigquery/_pyarrow_helpers.py +++ b/google/cloud/bigquery/_pyarrow_helpers.py @@ -49,7 +49,7 @@ def pyarrow_timestamp(): _BQ_TO_ARROW_SCALARS = {} _ARROW_SCALAR_IDS_TO_BQ = {} -if pyarrow: +if pyarrow: # pragma: NO COVER # This dictionary is duplicated in bigquery_storage/test/unite/test_reader.py # When modifying it be sure to update it there as well. # Note(todo!!): type "BIGNUMERIC"'s matching pyarrow type is added in _pandas_helpers.py diff --git a/noxfile.py b/noxfile.py index 9445f4f74..548690afa 100644 --- a/noxfile.py +++ b/noxfile.py @@ -18,7 +18,6 @@ import os import re import shutil - import nox @@ -66,6 +65,7 @@ def default(session, install_extras=True): Python corresponding to the ``nox`` binary the ``PATH`` can run the tests. 
""" + constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) @@ -86,8 +86,7 @@ def default(session, install_extras=True): install_target = ".[all]" else: install_target = "." - session.install("-e", install_target, "-c", constraints_path) - + session.install("-e", install_target) session.run("python", "-m", "pip", "freeze") # Run py.test against the unit tests. @@ -108,6 +107,7 @@ def default(session, install_extras=True): @nox.session(python=UNIT_TEST_PYTHON_VERSIONS) def unit(session): """Run the unit test suite.""" + default(session) @@ -118,8 +118,11 @@ def unit_noextras(session): # Install optional dependencies that are out-of-date. # https://github.com/googleapis/python-bigquery/issues/933 # There is no pyarrow 1.0.0 package for Python 3.9. + if session.python == UNIT_TEST_PYTHON_VERSIONS[0]: - session.install("pyarrow==1.0.0") + session.install("pyarrow>=3.0.0") + elif session.python == UNIT_TEST_PYTHON_VERSIONS[-1]: + session.install("pyarrow") default(session, install_extras=False) @@ -127,6 +130,7 @@ def unit_noextras(session): @nox.session(python=DEFAULT_PYTHON_VERSION) def mypy(session): """Run type checks with mypy.""" + session.install("-e", ".[all]") session.install(MYPY_VERSION) @@ -147,6 +151,7 @@ def pytype(session): # An indirect dependecy attrs==21.1.0 breaks the check, and installing a less # recent version avoids the error until a possibly better fix is found. # https://github.com/googleapis/python-bigquery/issues/655 + session.install("attrs==20.3.0") session.install("-e", ".[all]") session.install(PYTYPE_VERSION) @@ -206,6 +211,7 @@ def system(session): @nox.session(python=DEFAULT_PYTHON_VERSION) def mypy_samples(session): """Run type checks with mypy.""" + session.install("pytest") for requirements_path in CURRENT_DIRECTORY.glob("samples/*/requirements.txt"): session.install("-r", str(requirements_path)) @@ -283,6 +289,7 @@ def cover(session): This outputs the coverage report aggregating coverage from the unit test runs (not system test runs), and then erases coverage data. 
""" + session.install("coverage", "pytest-cov") session.run("coverage", "report", "--show-missing", "--fail-under=100") session.run("coverage", "erase") diff --git a/samples/desktopapp/requirements-test.txt b/samples/desktopapp/requirements-test.txt index 9142d4905..413a7fd48 100644 --- a/samples/desktopapp/requirements-test.txt +++ b/samples/desktopapp/requirements-test.txt @@ -2,3 +2,4 @@ google-cloud-testutils==1.4.0 pytest===7.4.4; python_version == '3.7' pytest==8.1.1; python_version >= '3.8' mock==5.1.0 +pyarrow>=3.0.0 \ No newline at end of file diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt index 9142d4905..413a7fd48 100644 --- a/samples/snippets/requirements-test.txt +++ b/samples/snippets/requirements-test.txt @@ -2,3 +2,4 @@ google-cloud-testutils==1.4.0 pytest===7.4.4; python_version == '3.7' pytest==8.1.1; python_version >= '3.8' mock==5.1.0 +pyarrow>=3.0.0 \ No newline at end of file diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt index e69de29bb..e80ca0ccf 100644 --- a/testing/constraints-3.11.txt +++ b/testing/constraints-3.11.txt @@ -0,0 +1 @@ +pyarrow>=3.0.0 \ No newline at end of file diff --git a/testing/constraints-3.12.txt b/testing/constraints-3.12.txt index e69de29bb..e80ca0ccf 100644 --- a/testing/constraints-3.12.txt +++ b/testing/constraints-3.12.txt @@ -0,0 +1 @@ +pyarrow>=3.0.0 \ No newline at end of file diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index 28787adb7..1fc7c6838 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -27,7 +27,7 @@ packaging==20.0.0 pandas==1.1.0 proto-plus==1.22.0 protobuf==3.19.5 -pyarrow==3.0.0 +pyarrow>=3.0.0 python-dateutil==2.7.3 requests==2.21.0 Shapely==1.8.4 diff --git a/tests/unit/test__pandas_helpers.py b/tests/unit/test__pandas_helpers.py index abee39065..244384620 100644 --- a/tests/unit/test__pandas_helpers.py +++ b/tests/unit/test__pandas_helpers.py @@ -53,6 +53,7 @@ if pyarrow: import pyarrow.parquet import pyarrow.types + from pyarrow import ArrowTypeError # type: ignore # noqa: E402 else: # pragma: NO COVER # Mock out pyarrow when missing, because methods from pyarrow.types are # used in test parameterization. 
@@ -557,13 +558,25 @@ def test_bq_to_arrow_array_w_pandas_timestamp(module_under_test, bq_type, rows): @pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`") def test_bq_to_arrow_array_w_arrays(module_under_test): rows = [[1, 2, 3], [], [4, 5, 6]] - series = pandas.Series(rows, dtype="object") + series = pandas.Series(rows, name="test_col", dtype="object") bq_field = schema.SchemaField("field_name", "INTEGER", mode="REPEATED") arrow_array = module_under_test.bq_to_arrow_array(series, bq_field) roundtrip = arrow_array.to_pylist() assert rows == roundtrip +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +@pytest.mark.skipif(pyarrow is None, reason="Requires `pyarrow`") +def test_bq_to_arrow_array_w_conversion_fail(module_under_test): # pragma: NO COVER + rows = [[1, 2, 3], [], [4, 5, 6]] + series = pandas.Series(rows, name="test_col", dtype="object") + bq_field = schema.SchemaField("field_name", "STRING", mode="REPEATED") + exc_msg = f"""Error converting Pandas column with name: "{series.name}" and datatype: "{series.dtype}" to an appropriate pyarrow datatype: Array, ListArray, or StructArray""" + with pytest.raises(ArrowTypeError, match=exc_msg): + module_under_test.bq_to_arrow_array(series, bq_field) + raise ArrowTypeError(exc_msg) + + @pytest.mark.parametrize("bq_type", ["RECORD", "record", "STRUCT", "struct"]) @pytest.mark.skipif(pandas is None, reason="Requires `pandas`") @pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`") @@ -573,7 +586,7 @@ def test_bq_to_arrow_array_w_structs(module_under_test, bq_type): None, {"int_col": 456, "string_col": "def"}, ] - series = pandas.Series(rows, dtype="object") + series = pandas.Series(rows, name="test_col", dtype="object") bq_field = schema.SchemaField( "field_name", bq_type, diff --git a/tests/unit/test_table.py b/tests/unit/test_table.py index 0d549120f..a8107ee97 100644 --- a/tests/unit/test_table.py +++ b/tests/unit/test_table.py @@ -49,7 +49,7 @@ pyarrow = _versions_helpers.PYARROW_VERSIONS.try_import() -if pyarrow: +if pyarrow: # pragma: NO COVER import pyarrow.types try: @@ -3743,7 +3743,7 @@ def test_to_dataframe_w_dtypes_mapper(self): if hasattr(pandas, "Float64Dtype"): self.assertEqual(list(df.miles), [1.77, 6.66, 2.0]) self.assertEqual(df.miles.dtype.name, "Float64") - else: + else: # pragma: NO COVER self.assertEqual(list(df.miles), ["1.77", "6.66", "2.0"]) self.assertEqual(df.miles.dtype.name, "string") From f8f70a35b562d13ca5ae87f29e1ddf0f3833ff70 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 21 Mar 2024 13:16:00 +0100 Subject: [PATCH 09/15] chore(deps): update dependency google-auth to v2.29.0 (#1865) --- samples/geography/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/geography/requirements.txt b/samples/geography/requirements.txt index c7a793358..1cb20b102 100644 --- a/samples/geography/requirements.txt +++ b/samples/geography/requirements.txt @@ -14,7 +14,7 @@ geopandas===0.10.2; python_version == '3.7' geopandas===0.13.2; python_version == '3.8' geopandas==0.14.3; python_version >= '3.9' google-api-core==2.17.1 -google-auth==2.28.2 +google-auth==2.29.0 google-cloud-bigquery==3.19.0 google-cloud-bigquery-storage==2.24.0 google-cloud-core==2.4.1 From b0e95a05d7dbe94f246600ed6c5ebbc2b6a7013f Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 22 Mar 2024 11:42:22 +0100 Subject: [PATCH 10/15] chore(deps): update dependency google-api-core to v2.18.0 (#1866) --- samples/geography/requirements.txt | 2 +- 1 
file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/geography/requirements.txt b/samples/geography/requirements.txt index 1cb20b102..6fa7ffc7e 100644 --- a/samples/geography/requirements.txt +++ b/samples/geography/requirements.txt @@ -13,7 +13,7 @@ geojson==3.1.0 geopandas===0.10.2; python_version == '3.7' geopandas===0.13.2; python_version == '3.8' geopandas==0.14.3; python_version >= '3.9' -google-api-core==2.17.1 +google-api-core==2.18.0 google-auth==2.29.0 google-cloud-bigquery==3.19.0 google-cloud-bigquery-storage==2.24.0 From e265db6a6a37d13056dcaac240c2cf3975dfd644 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim=20Swe=C3=B1a=20=28Swast=29?= Date: Wed, 27 Mar 2024 09:58:18 -0500 Subject: [PATCH 11/15] fix: use an allowlist instead of denylist to determine when `query_and_wait` uses `jobs.query` API (#1869) --- google/cloud/bigquery/_job_helpers.py | 53 ++++++++++++++++++--------- tests/unit/test__job_helpers.py | 15 +++++++- 2 files changed, 49 insertions(+), 19 deletions(-) diff --git a/google/cloud/bigquery/_job_helpers.py b/google/cloud/bigquery/_job_helpers.py index 0692c9b65..602a49eba 100644 --- a/google/cloud/bigquery/_job_helpers.py +++ b/google/cloud/bigquery/_job_helpers.py @@ -400,9 +400,13 @@ def query_and_wait( :class:`~google.cloud.bigquery.job.QueryJobConfig` class. """ + request_body = _to_query_request( + query=query, job_config=job_config, location=location, timeout=api_timeout + ) + # Some API parameters aren't supported by the jobs.query API. In these # cases, fallback to a jobs.insert call. - if not _supported_by_jobs_query(job_config): + if not _supported_by_jobs_query(request_body): return _wait_or_cancel( query_jobs_insert( client=client, @@ -424,9 +428,6 @@ def query_and_wait( ) path = _to_query_path(project) - request_body = _to_query_request( - query=query, job_config=job_config, location=location, timeout=api_timeout - ) if page_size is not None and max_results is not None: request_body["maxResults"] = min(page_size, max_results) @@ -506,20 +507,38 @@ def do_query(): return do_query() -def _supported_by_jobs_query(job_config: Optional[job.QueryJobConfig]) -> bool: +def _supported_by_jobs_query(request_body: Dict[str, Any]) -> bool: """True if jobs.query can be used. False if jobs.insert is needed.""" - if job_config is None: - return True - - return ( - # These features aren't supported by jobs.query. - job_config.clustering_fields is None - and job_config.destination is None - and job_config.destination_encryption_configuration is None - and job_config.range_partitioning is None - and job_config.table_definitions is None - and job_config.time_partitioning is None - ) + request_keys = frozenset(request_body.keys()) + + # Per issue: https://github.com/googleapis/python-bigquery/issues/1867 + # use an allowlist here instead of a denylist because the backend API allows + # unsupported parameters without any warning or failure. 
Instead, keep this + # set in sync with those in QueryRequest: + # https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#QueryRequest + keys_allowlist = { + "kind", + "query", + "maxResults", + "defaultDataset", + "timeoutMs", + "dryRun", + "preserveNulls", + "useQueryCache", + "useLegacySql", + "parameterMode", + "queryParameters", + "location", + "formatOptions", + "connectionProperties", + "labels", + "maximumBytesBilled", + "requestId", + "createSession", + } + + unsupported_keys = request_keys - keys_allowlist + return len(unsupported_keys) == 0 def _wait_or_cancel( diff --git a/tests/unit/test__job_helpers.py b/tests/unit/test__job_helpers.py index c30964c57..671b829f7 100644 --- a/tests/unit/test__job_helpers.py +++ b/tests/unit/test__job_helpers.py @@ -22,6 +22,7 @@ import pytest from google.cloud.bigquery.client import Client +from google.cloud.bigquery import enums from google.cloud.bigquery import _job_helpers from google.cloud.bigquery.job import copy_ as job_copy from google.cloud.bigquery.job import extract as job_extract @@ -1141,12 +1142,22 @@ def test_make_job_id_w_job_id_overrides_prefix(): False, id="destination_encryption_configuration", ), + # priority="BATCH" is not supported. See: + # https://github.com/googleapis/python-bigquery/issues/1867 + pytest.param( + job_query.QueryJobConfig( + priority=enums.QueryPriority.BATCH, + ), + False, + id="priority=BATCH", + ), ), ) -def test_supported_by_jobs_query( +def test_supported_by_jobs_query_from_queryjobconfig( job_config: Optional[job_query.QueryJobConfig], expected: bool ): - assert _job_helpers._supported_by_jobs_query(job_config) == expected + request_body = _job_helpers._to_query_request(job_config, query="SELECT 1") + assert _job_helpers._supported_by_jobs_query(request_body) == expected def test_wait_or_cancel_no_exception(): From 08b1e6f9c41121907c345daedbae40ece18e8b6a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim=20Swe=C3=B1a=20=28Swast=29?= Date: Wed, 27 Mar 2024 10:28:27 -0500 Subject: [PATCH 12/15] feat: add `fields` parameter to `set_iam_policy` for consistency with update methods (#1872) --- google/cloud/bigquery/client.py | 79 +++++++++++++++++++++- samples/snippets/create_iam_policy_test.py | 44 ++++++++++++ tests/system/test_client.py | 28 -------- tests/unit/test_client.py | 67 ++++++++++++++++++ 4 files changed, 188 insertions(+), 30 deletions(-) create mode 100644 samples/snippets/create_iam_policy_test.py diff --git a/google/cloud/bigquery/client.py b/google/cloud/bigquery/client.py index 408e7e49c..5521e2e1e 100644 --- a/google/cloud/bigquery/client.py +++ b/google/cloud/bigquery/client.py @@ -882,6 +882,35 @@ def get_iam_policy( retry: retries.Retry = DEFAULT_RETRY, timeout: TimeoutType = DEFAULT_TIMEOUT, ) -> Policy: + """Return the access control policy for a table resource. + + Args: + table (Union[ \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ + google.cloud.bigquery.table.TableListItem, \ + str, \ + ]): + The table to get the access control policy for. + If a string is passed in, this method attempts to create a + table reference from a string using + :func:`~google.cloud.bigquery.table.TableReference.from_string`. + requested_policy_version (int): + Optional. The maximum policy version that will be used to format the policy. + + Only version ``1`` is currently supported. + + See: https://cloud.google.com/bigquery/docs/reference/rest/v2/GetPolicyOptions + retry (Optional[google.api_core.retry.Retry]): + How to retry the RPC. 
+ timeout (Optional[float]): + The number of seconds to wait for the underlying HTTP transport + before using ``retry``. + + Returns: + google.api_core.iam.Policy: + The access control policy. + """ table = _table_arg_to_table_ref(table, default_project=self.project) if requested_policy_version != 1: @@ -910,7 +939,53 @@ def set_iam_policy( updateMask: Optional[str] = None, retry: retries.Retry = DEFAULT_RETRY, timeout: TimeoutType = DEFAULT_TIMEOUT, + *, + fields: Sequence[str] = (), ) -> Policy: + """Return the access control policy for a table resource. + + Args: + table (Union[ \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ + google.cloud.bigquery.table.TableListItem, \ + str, \ + ]): + The table to get the access control policy for. + If a string is passed in, this method attempts to create a + table reference from a string using + :func:`~google.cloud.bigquery.table.TableReference.from_string`. + policy (google.api_core.iam.Policy): + The access control policy to set. + updateMask (Optional[str]): + Mask as defined by + https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/setIamPolicy#body.request_body.FIELDS.update_mask + + Incompatible with ``fields``. + retry (Optional[google.api_core.retry.Retry]): + How to retry the RPC. + timeout (Optional[float]): + The number of seconds to wait for the underlying HTTP transport + before using ``retry``. + fields (Sequence[str]): + Which properties to set on the policy. See: + https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/setIamPolicy#body.request_body.FIELDS.update_mask + + Incompatible with ``updateMask``. + + Returns: + google.api_core.iam.Policy: + The updated access control policy. + """ + if updateMask is not None and not fields: + update_mask = updateMask + elif updateMask is not None and fields: + raise ValueError("Cannot set both fields and updateMask") + elif fields: + update_mask = ",".join(fields) + else: + update_mask = None + table = _table_arg_to_table_ref(table, default_project=self.project) if not isinstance(policy, (Policy)): @@ -918,8 +993,8 @@ def set_iam_policy( body = {"policy": policy.to_api_repr()} - if updateMask is not None: - body["updateMask"] = updateMask + if update_mask is not None: + body["updateMask"] = update_mask path = "{}:setIamPolicy".format(table.path) span_attributes = {"path": path} diff --git a/samples/snippets/create_iam_policy_test.py b/samples/snippets/create_iam_policy_test.py new file mode 100644 index 000000000..c41ced2cd --- /dev/null +++ b/samples/snippets/create_iam_policy_test.py @@ -0,0 +1,44 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def test_create_iam_policy(table_id: str): + your_table_id = table_id + + # [START bigquery_create_iam_policy] + from google.cloud import bigquery + + bqclient = bigquery.Client() + + policy = bqclient.get_iam_policy( + your_table_id, # e.g. 
"project.dataset.table" + ) + + analyst_email = "example-analyst-group@google.com" + binding = { + "role": "roles/bigquery.dataViewer", + "members": {f"group:{analyst_email}"}, + } + policy.bindings.append(binding) + + updated_policy = bqclient.set_iam_policy( + your_table_id, # e.g. "project.dataset.table" + policy, + ) + + for binding in updated_policy.bindings: + print(repr(binding)) + # [END bigquery_create_iam_policy] + + assert binding in updated_policy.bindings diff --git a/tests/system/test_client.py b/tests/system/test_client.py index 04740de8a..414239323 100644 --- a/tests/system/test_client.py +++ b/tests/system/test_client.py @@ -36,7 +36,6 @@ from google.api_core.exceptions import InternalServerError from google.api_core.exceptions import ServiceUnavailable from google.api_core.exceptions import TooManyRequests -from google.api_core.iam import Policy from google.cloud import bigquery from google.cloud.bigquery.dataset import Dataset from google.cloud.bigquery.dataset import DatasetReference @@ -1485,33 +1484,6 @@ def test_copy_table(self): got_rows = self._fetch_single_page(dest_table) self.assertTrue(len(got_rows) > 0) - def test_get_set_iam_policy(self): - from google.cloud.bigquery.iam import BIGQUERY_DATA_VIEWER_ROLE - - dataset = self.temp_dataset(_make_dataset_id("create_table")) - table_id = "test_table" - table_ref = Table(dataset.table(table_id)) - self.assertFalse(_table_exists(table_ref)) - - table = helpers.retry_403(Config.CLIENT.create_table)(table_ref) - self.to_delete.insert(0, table) - - self.assertTrue(_table_exists(table)) - - member = "serviceAccount:{}".format(Config.CLIENT.get_service_account_email()) - BINDING = { - "role": BIGQUERY_DATA_VIEWER_ROLE, - "members": {member}, - } - - policy = Config.CLIENT.get_iam_policy(table) - self.assertIsInstance(policy, Policy) - self.assertEqual(policy.bindings, []) - - policy.bindings.append(BINDING) - returned_policy = Config.CLIENT.set_iam_policy(table, policy) - self.assertEqual(returned_policy.bindings, policy.bindings) - def test_test_iam_permissions(self): dataset = self.temp_dataset(_make_dataset_id("create_table")) table_id = "test_table" diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index d20712a8a..60dcab85e 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -1782,6 +1782,60 @@ def test_set_iam_policy(self): from google.cloud.bigquery.iam import BIGQUERY_DATA_VIEWER_ROLE from google.api_core.iam import Policy + PATH = "/projects/%s/datasets/%s/tables/%s:setIamPolicy" % ( + self.PROJECT, + self.DS_ID, + self.TABLE_ID, + ) + ETAG = "foo" + VERSION = 1 + OWNER1 = "user:phred@example.com" + OWNER2 = "group:cloud-logs@google.com" + EDITOR1 = "domain:google.com" + EDITOR2 = "user:phred@example.com" + VIEWER1 = "serviceAccount:1234-abcdef@service.example.com" + VIEWER2 = "user:phred@example.com" + BINDINGS = [ + {"role": BIGQUERY_DATA_OWNER_ROLE, "members": [OWNER1, OWNER2]}, + {"role": BIGQUERY_DATA_EDITOR_ROLE, "members": [EDITOR1, EDITOR2]}, + {"role": BIGQUERY_DATA_VIEWER_ROLE, "members": [VIEWER1, VIEWER2]}, + ] + FIELDS = ("bindings", "etag") + RETURNED = {"etag": ETAG, "version": VERSION, "bindings": BINDINGS} + + policy = Policy() + for binding in BINDINGS: + policy[binding["role"]] = binding["members"] + + BODY = {"policy": policy.to_api_repr(), "updateMask": "bindings,etag"} + + creds = _make_credentials() + http = object() + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) + conn = client._connection = make_connection(RETURNED) + + 
with mock.patch( + "google.cloud.bigquery.opentelemetry_tracing._get_final_span_attributes" + ) as final_attributes: + returned_policy = client.set_iam_policy( + self.TABLE_REF, policy, fields=FIELDS, timeout=7.5 + ) + + final_attributes.assert_called_once_with({"path": PATH}, client, None) + + conn.api_request.assert_called_once_with( + method="POST", path=PATH, data=BODY, timeout=7.5 + ) + self.assertEqual(returned_policy.etag, ETAG) + self.assertEqual(returned_policy.version, VERSION) + self.assertEqual(dict(returned_policy), dict(policy)) + + def test_set_iam_policy_updateMask(self): + from google.cloud.bigquery.iam import BIGQUERY_DATA_OWNER_ROLE + from google.cloud.bigquery.iam import BIGQUERY_DATA_EDITOR_ROLE + from google.cloud.bigquery.iam import BIGQUERY_DATA_VIEWER_ROLE + from google.api_core.iam import Policy + PATH = "/projects/%s/datasets/%s/tables/%s:setIamPolicy" % ( self.PROJECT, self.DS_ID, @@ -1858,6 +1912,19 @@ def test_set_iam_policy_no_mask(self): method="POST", path=PATH, data=BODY, timeout=7.5 ) + def test_set_ia_policy_updateMask_and_fields(self): + from google.api_core.iam import Policy + + policy = Policy() + creds = _make_credentials() + http = object() + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) + + with pytest.raises(ValueError, match="updateMask"): + client.set_iam_policy( + self.TABLE_REF, policy, updateMask="bindings", fields=("bindings",) + ) + def test_set_iam_policy_invalid_policy(self): from google.api_core.iam import Policy From c2496a1014a7d99e805b3d0a66e4517165bd7e01 Mon Sep 17 00:00:00 2001 From: Chalmer Lowe Date: Wed, 27 Mar 2024 12:18:17 -0400 Subject: [PATCH 13/15] fix: updates a number of optional dependencies (#1864) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This fix updates a number of optional dependencies. We use a different module import process (pytest.importorskip versus unittest.skipif). This first major commit gets the ball rolling, there are gonna be a few additional commits to cover other files. Fixes # 🦕 --- google/cloud/bigquery/_tqdm_helpers.py | 2 +- google/cloud/bigquery/client.py | 2 +- setup.py | 5 +- tests/system/test_client.py | 28 +- tests/unit/test_client.py | 172 +++++----- tests/unit/test_dbapi__helpers.py | 10 +- tests/unit/test_dbapi_connection.py | 28 +- tests/unit/test_dbapi_cursor.py | 33 +- tests/unit/test_table.py | 436 ++++++++++++------------- 9 files changed, 311 insertions(+), 405 deletions(-) diff --git a/google/cloud/bigquery/_tqdm_helpers.py b/google/cloud/bigquery/_tqdm_helpers.py index 456ca2530..cb81bd8f6 100644 --- a/google/cloud/bigquery/_tqdm_helpers.py +++ b/google/cloud/bigquery/_tqdm_helpers.py @@ -67,7 +67,7 @@ def get_progress_bar(progress_bar_type, description, total, unit): ) elif progress_bar_type == "tqdm_gui": return tqdm.tqdm_gui(desc=description, total=total, unit=unit) - except (KeyError, TypeError): + except (KeyError, TypeError): # pragma: NO COVER # Protect ourselves from any tqdm errors. In case of # unexpected tqdm behavior, just fall back to showing # no progress bar. 
diff --git a/google/cloud/bigquery/client.py b/google/cloud/bigquery/client.py index 5521e2e1e..891a54e5c 100644 --- a/google/cloud/bigquery/client.py +++ b/google/cloud/bigquery/client.py @@ -593,7 +593,7 @@ def _ensure_bqstorage_client( ) return None - if bqstorage_client is None: + if bqstorage_client is None: # pragma: NO COVER bqstorage_client = bigquery_storage.BigQueryReadClient( credentials=self._credentials, client_options=client_options, diff --git a/setup.py b/setup.py index 5a35f4136..ed9a6351b 100644 --- a/setup.py +++ b/setup.py @@ -45,8 +45,9 @@ ] pyarrow_dependency = "pyarrow >= 3.0.0" extras = { - # Keep the no-op bqstorage extra for backward compatibility. - # See: https://github.com/googleapis/python-bigquery/issues/757 + # bqstorage had a period where it was a required dependency, and has been + # moved back to optional due to bloat. See + # https://github.com/googleapis/python-bigquery/issues/1196 for more background. "bqstorage": [ "google-cloud-bigquery-storage >= 2.6.0, <3.0.0dev", # Due to an issue in pip's dependency resolver, the `grpc` extra is not diff --git a/tests/system/test_client.py b/tests/system/test_client.py index 414239323..862ef3245 100644 --- a/tests/system/test_client.py +++ b/tests/system/test_client.py @@ -54,16 +54,6 @@ from . import helpers -try: - from google.cloud import bigquery_storage -except ImportError: # pragma: NO COVER - bigquery_storage = None - -try: - import pyarrow - import pyarrow.types -except ImportError: # pragma: NO COVER - pyarrow = None JOB_TIMEOUT = 120 # 2 minutes DATA_PATH = pathlib.Path(__file__).parent.parent / "data" @@ -1772,11 +1762,10 @@ def test_dbapi_fetchall_from_script(self): row_tuples = [r.values() for r in rows] self.assertEqual(row_tuples, [(5, "foo"), (6, "bar"), (7, "baz")]) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_dbapi_fetch_w_bqstorage_client_large_result_set(self): + bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage") + pytest.importorskip("pyarrow") + bqstorage_client = bigquery_storage.BigQueryReadClient( credentials=Config.CLIENT._credentials ) @@ -1834,10 +1823,8 @@ def test_dbapi_dry_run_query(self): self.assertEqual(list(rows), []) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_dbapi_connection_does_not_leak_sockets(self): + pytest.importorskip("google.cloud.bigquery_storage") current_process = psutil.Process() conn_count_start = len(current_process.connections()) @@ -2382,11 +2369,10 @@ def test_create_table_rows_fetch_nested_schema(self): self.assertEqual(found[7], e_favtime) self.assertEqual(found[8], decimal.Decimal(expected["FavoriteNumber"])) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_nested_table_to_arrow(self): + bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage") + pyarrow = pytest.importorskip("pyarrow") + pyarrow.types = pytest.importorskip("pyarrow.types") from google.cloud.bigquery.job import SourceFormat from google.cloud.bigquery.job import WriteDisposition diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 60dcab85e..e9e74b06b 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -32,15 +32,6 @@ import packaging import pytest -try: - import importlib.metadata as metadata -except ImportError: - import 
importlib_metadata as metadata - -try: - import pandas -except (ImportError, AttributeError): # pragma: NO COVER - pandas = None try: import opentelemetry @@ -59,11 +50,6 @@ msg = "Error importing from opentelemetry, is the installed version compatible?" raise ImportError(msg) from exc -try: - import pyarrow -except (ImportError, AttributeError): # pragma: NO COVER - pyarrow = None - import google.api_core.exceptions from google.api_core import client_info import google.cloud._helpers @@ -75,18 +61,9 @@ from google.cloud.bigquery.retry import DEFAULT_TIMEOUT import google.cloud.bigquery.table -try: - from google.cloud import bigquery_storage -except (ImportError, AttributeError): # pragma: NO COVER - bigquery_storage = None from test_utils.imports import maybe_fail_import from tests.unit.helpers import make_connection -if pandas is not None: - PANDAS_INSTALLED_VERSION = metadata.version("pandas") -else: - PANDAS_INSTALLED_VERSION = "0.0.0" - def _make_credentials(): import google.auth.credentials @@ -800,10 +777,9 @@ def test_get_dataset(self): self.assertEqual(dataset.dataset_id, self.DS_ID) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_ensure_bqstorage_client_creating_new_instance(self): + bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage") + mock_client = mock.create_autospec(bigquery_storage.BigQueryReadClient) mock_client_instance = object() mock_client.return_value = mock_client_instance @@ -849,10 +825,8 @@ def fail_bqstorage_import(name, globals, locals, fromlist, level): ] assert matching_warnings, "Missing dependency warning not raised." - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_ensure_bqstorage_client_obsolete_dependency(self): + pytest.importorskip("google.cloud.bigquery_storage") creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) @@ -869,10 +843,8 @@ def test_ensure_bqstorage_client_obsolete_dependency(self): ] assert matching_warnings, "Obsolete dependency warning not raised." 
- @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_ensure_bqstorage_client_existing_client_check_passes(self): + pytest.importorskip("google.cloud.bigquery_storage") creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) mock_storage_client = mock.sentinel.mock_storage_client @@ -883,10 +855,23 @@ def test_ensure_bqstorage_client_existing_client_check_passes(self): self.assertIs(bqstorage_client, mock_storage_client) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) + def test_ensure_bqstorage_client_is_none(self): + pytest.importorskip("google.cloud.bigquery_storage") + creds = _make_credentials() + client = self._make_one(project=self.PROJECT, credentials=creds) + bqstorage_client = None + + assert bqstorage_client is None + bqstorage_client = client._ensure_bqstorage_client( + bqstorage_client=bqstorage_client, + ) + + assert isinstance( + bqstorage_client, google.cloud.bigquery_storage_v1.BigQueryReadClient + ) + def test_ensure_bqstorage_client_existing_client_check_fails(self): + pytest.importorskip("google.cloud.bigquery_storage") creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) mock_storage_client = mock.sentinel.mock_storage_client @@ -972,8 +957,8 @@ def test_create_routine_w_conflict(self): timeout=DEFAULT_TIMEOUT, ) - @unittest.skipIf(opentelemetry is None, "Requires `opentelemetry`") def test_span_status_is_set(self): + pytest.importorskip("opentelemetry") from google.cloud.bigquery.routine import Routine tracer_provider = TracerProvider() @@ -6039,8 +6024,8 @@ def test_insert_rows_w_numeric(self): timeout=DEFAULT_TIMEOUT, ) - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_insert_rows_from_dataframe(self): + pandas = pytest.importorskip("pandas") from google.cloud.bigquery.schema import SchemaField from google.cloud.bigquery.table import Table @@ -6126,8 +6111,8 @@ def test_insert_rows_from_dataframe(self): ) assert call == expected_call - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_insert_rows_from_dataframe_nan(self): + pandas = pytest.importorskip("pandas") from google.cloud.bigquery.schema import SchemaField from google.cloud.bigquery.table import Table @@ -6194,8 +6179,8 @@ def test_insert_rows_from_dataframe_nan(self): ) assert call == expected_call - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_insert_rows_from_dataframe_many_columns(self): + pandas = pytest.importorskip("pandas") from google.cloud.bigquery.schema import SchemaField from google.cloud.bigquery.table import Table @@ -6247,8 +6232,8 @@ def test_insert_rows_from_dataframe_many_columns(self): assert len(actual_calls) == 1 assert actual_calls[0] == expected_call - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_insert_rows_from_dataframe_w_explicit_none_insert_ids(self): + pandas = pytest.importorskip("pandas") from google.cloud.bigquery.schema import SchemaField from google.cloud.bigquery.table import Table @@ -7569,9 +7554,9 @@ def test_load_table_from_file_w_default_load_config(self): project=self.PROJECT, ) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from 
google.cloud.bigquery.schema import PolicyTagList, SchemaField @@ -7665,9 +7650,9 @@ def test_load_table_from_dataframe(self): # (not passed in via job_config) assert "description" not in field - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_client_location(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -7710,9 +7695,9 @@ def test_load_table_from_dataframe_w_client_location(self): sent_config = load_table_from_file.mock_calls[0][2]["job_config"] assert sent_config.source_format == job.SourceFormat.PARQUET - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_custom_job_config_wihtout_source_format(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -7765,9 +7750,9 @@ def test_load_table_from_dataframe_w_custom_job_config_wihtout_source_format(sel # the original config object should not have been modified assert job_config.to_api_repr() == original_config_copy.to_api_repr() - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_custom_job_config_w_source_format(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -7821,9 +7806,9 @@ def test_load_table_from_dataframe_w_custom_job_config_w_source_format(self): # the original config object should not have been modified assert job_config.to_api_repr() == original_config_copy.to_api_repr() - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_parquet_options_none(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -7873,9 +7858,9 @@ def test_load_table_from_dataframe_w_parquet_options_none(self): sent_config = load_table_from_file.mock_calls[0][2]["job_config"] assert sent_config.parquet_options.enable_list_inference is True - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_list_inference_none(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -7933,9 +7918,9 @@ def test_load_table_from_dataframe_w_list_inference_none(self): # the original config object should not have been modified assert job_config.to_api_repr() == original_config_copy.to_api_repr() - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_explicit_job_config_override(self): + pandas = 
pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -7994,9 +7979,9 @@ def test_load_table_from_dataframe_w_explicit_job_config_override(self): # the original config object should not have been modified assert job_config.to_api_repr() == original_config_copy.to_api_repr() - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_default_load_config(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -8044,9 +8029,9 @@ def test_load_table_from_dataframe_w_default_load_config(self): assert sent_config.write_disposition == job.WriteDisposition.WRITE_TRUNCATE assert sent_config.source_format == job.SourceFormat.PARQUET - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_list_inference_false(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -8105,9 +8090,9 @@ def test_load_table_from_dataframe_w_list_inference_false(self): # the original config object should not have been modified assert job_config.to_api_repr() == original_config_copy.to_api_repr() - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_custom_job_config_w_wrong_source_format(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery import job client = self._make_client() @@ -8125,9 +8110,9 @@ def test_load_table_from_dataframe_w_custom_job_config_w_wrong_source_format(sel assert "Got unexpected source_format:" in str(exc.value) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_automatic_schema(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -8226,9 +8211,9 @@ def test_load_table_from_dataframe_w_automatic_schema(self): SchemaField("time_col", "TIME"), ) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_automatic_schema_detection_fails(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job @@ -8286,9 +8271,9 @@ def test_load_table_from_dataframe_w_automatic_schema_detection_fails(self): assert sent_config.source_format == job.SourceFormat.PARQUET assert sent_config.schema is None - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_index_and_auto_schema(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from 
google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -8348,9 +8333,9 @@ def test_load_table_from_dataframe_w_index_and_auto_schema(self): ] assert sent_schema == expected_sent_schema - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_unknown_table(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES client = self._make_client() @@ -8384,9 +8369,9 @@ def test_load_table_from_dataframe_unknown_table(self): timeout=DEFAULT_TIMEOUT, ) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_nullable_int64_datatype(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -8429,9 +8414,8 @@ def test_load_table_from_dataframe_w_nullable_int64_datatype(self): SchemaField("x", "INT64", "NULLABLE", None), ) - @unittest.skipIf(pandas is None, "Requires `pandas`") - # @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_nullable_int64_datatype_automatic_schema(self): + pandas = pytest.importorskip("pandas") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -8474,9 +8458,9 @@ def test_load_table_from_dataframe_w_nullable_int64_datatype_automatic_schema(se SchemaField("x", "INT64", "NULLABLE", None), ) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_struct_fields(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -8534,13 +8518,13 @@ def test_load_table_from_dataframe_struct_fields(self): assert sent_config.source_format == job.SourceFormat.PARQUET assert sent_config.schema == schema - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_array_fields(self): """Test that a DataFrame with array columns can be uploaded correctly. See: https://github.com/googleapis/python-bigquery/issues/19 """ + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -8599,13 +8583,13 @@ def test_load_table_from_dataframe_array_fields(self): assert sent_config.source_format == job.SourceFormat.PARQUET assert sent_config.schema == schema - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_array_fields_w_auto_schema(self): """Test that a DataFrame with array columns can be uploaded correctly. 
See: https://github.com/googleapis/python-bigquery/issues/19 """ + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -8662,9 +8646,9 @@ def test_load_table_from_dataframe_array_fields_w_auto_schema(self): assert sent_config.source_format == job.SourceFormat.PARQUET assert sent_config.schema == expected_schema - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_partial_schema(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -8746,9 +8730,9 @@ def test_load_table_from_dataframe_w_partial_schema(self): SchemaField("bytes_col", "BYTES"), ) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_partial_schema_extra_types(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -8783,9 +8767,9 @@ def test_load_table_from_dataframe_w_partial_schema_extra_types(self): assert "bq_schema contains fields not present in dataframe" in message assert "unknown_col" in message - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_schema_arrow_custom_compression(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -8816,9 +8800,9 @@ def test_load_table_from_dataframe_w_schema_arrow_custom_compression(self): assert call_args is not None assert call_args.get("parquet_compression") == "LZ4" - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_wo_pyarrow_raises_error(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") client = self._make_client() records = [{"id": 1, "age": 100}, {"id": 2, "age": 60}] dataframe = pandas.DataFrame(records) @@ -8846,8 +8830,8 @@ def test_load_table_from_dataframe_wo_pyarrow_raises_error(self): ) def test_load_table_from_dataframe_w_bad_pyarrow_issues_warning(self): - pytest.importorskip("pandas", reason="Requires `pandas`") - pytest.importorskip("pyarrow", reason="Requires `pyarrow`") + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") client = self._make_client() records = [{"id": 1, "age": 100}, {"id": 2, "age": 60}] @@ -8874,14 +8858,14 @@ def test_load_table_from_dataframe_w_bad_pyarrow_issues_warning(self): location=self.LOCATION, ) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_nulls(self): """Test that a DataFrame with null columns can be uploaded if a BigQuery schema is specified. 
See: https://github.com/googleapis/google-cloud-python/issues/7370 """ + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.schema import SchemaField from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job @@ -8919,8 +8903,8 @@ def test_load_table_from_dataframe_w_nulls(self): assert sent_config.schema == schema assert sent_config.source_format == job.SourceFormat.PARQUET - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_load_table_from_dataframe_w_invaild_job_config(self): + pandas = pytest.importorskip("pandas") from google.cloud.bigquery import job client = self._make_client() @@ -8937,8 +8921,8 @@ def test_load_table_from_dataframe_w_invaild_job_config(self): err_msg = str(exc.value) assert "Expected an instance of LoadJobConfig" in err_msg - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_load_table_from_dataframe_with_csv_source_format(self): + pandas = pytest.importorskip("pandas") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField @@ -8987,9 +8971,9 @@ def test_load_table_from_dataframe_with_csv_source_format(self): sent_config = load_table_from_file.mock_calls[0][2]["job_config"] assert sent_config.source_format == job.SourceFormat.CSV - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_higher_scale_decimal128_datatype(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job from google.cloud.bigquery.schema import SchemaField diff --git a/tests/unit/test_dbapi__helpers.py b/tests/unit/test_dbapi__helpers.py index 542f923d2..7e1da0034 100644 --- a/tests/unit/test_dbapi__helpers.py +++ b/tests/unit/test_dbapi__helpers.py @@ -21,16 +21,10 @@ import pytest -try: - import pyarrow -except ImportError: # pragma: NO COVER - pyarrow = None - import google.cloud._helpers from google.cloud.bigquery import query, table from google.cloud.bigquery.dbapi import _helpers from google.cloud.bigquery.dbapi import exceptions -from tests.unit.helpers import _to_pyarrow class TestQueryParameters(unittest.TestCase): @@ -215,8 +209,10 @@ def test_empty_iterable(self): result = _helpers.to_bq_table_rows(rows_iterable) self.assertEqual(list(result), []) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_non_empty_iterable(self): + pytest.importorskip("pyarrow") + from tests.unit.helpers import _to_pyarrow + rows_iterable = [ dict( one=_to_pyarrow(1.1), diff --git a/tests/unit/test_dbapi_connection.py b/tests/unit/test_dbapi_connection.py index 88378ec98..4071e57e0 100644 --- a/tests/unit/test_dbapi_connection.py +++ b/tests/unit/test_dbapi_connection.py @@ -13,14 +13,10 @@ # limitations under the License. import gc +import pytest import unittest from unittest import mock -try: - from google.cloud import bigquery_storage -except ImportError: # pragma: NO COVER - bigquery_storage = None - class TestConnection(unittest.TestCase): @staticmethod @@ -41,6 +37,8 @@ def _mock_client(self): def _mock_bqstorage_client(self): # Assumption: bigquery_storage exists. It's the test's responisbility to # not use this helper or skip itself if bqstorage is not installed. 
+ from google.cloud import bigquery_storage + mock_client = mock.create_autospec(bigquery_storage.BigQueryReadClient) mock_client._transport = mock.Mock(spec=["channel"]) mock_client._transport.grpc_channel = mock.Mock(spec=["close"]) @@ -57,10 +55,8 @@ def test_ctor_wo_bqstorage_client(self): self.assertIs(connection._client, mock_client) self.assertIs(connection._bqstorage_client, None) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_ctor_w_bqstorage_client(self): + pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery.dbapi import Connection mock_client = self._mock_client() @@ -89,10 +85,8 @@ def test_connect_wo_client(self, mock_client): self.assertIsNotNone(connection._client) self.assertIsNotNone(connection._bqstorage_client) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_connect_w_client(self): + pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery.dbapi import connect from google.cloud.bigquery.dbapi import Connection @@ -107,10 +101,8 @@ def test_connect_w_client(self): self.assertIs(connection._client, mock_client) self.assertIs(connection._bqstorage_client, mock_bqstorage_client) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_connect_w_both_clients(self): + pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery.dbapi import connect from google.cloud.bigquery.dbapi import Connection @@ -143,10 +135,8 @@ def test_raises_error_if_closed(self): ): getattr(connection, method)() - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_close_closes_all_created_bigquery_clients(self): + pytest.importorskip("google.cloud.bigquery_storage") client = self._mock_client() bqstorage_client = self._mock_bqstorage_client() @@ -168,10 +158,8 @@ def test_close_closes_all_created_bigquery_clients(self): self.assertTrue(client.close.called) self.assertTrue(bqstorage_client._transport.grpc_channel.close.called) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_close_does_not_close_bigquery_clients_passed_to_it(self): + pytest.importorskip("google.cloud.bigquery_storage") client = self._mock_client() bqstorage_client = self._mock_bqstorage_client() connection = self._make_one(client=client, bqstorage_client=bqstorage_client) diff --git a/tests/unit/test_dbapi_cursor.py b/tests/unit/test_dbapi_cursor.py index e9fd2e3dd..6fca4cec0 100644 --- a/tests/unit/test_dbapi_cursor.py +++ b/tests/unit/test_dbapi_cursor.py @@ -21,18 +21,8 @@ import google.cloud.bigquery.table as bq_table -try: - import pyarrow -except ImportError: # pragma: NO COVER - pyarrow = None - from google.api_core import exceptions -try: - from google.cloud import bigquery_storage -except ImportError: # pragma: NO COVER - bigquery_storage = None - from tests.unit.helpers import _to_pyarrow @@ -97,6 +87,8 @@ def _mock_client( return mock_client def _mock_bqstorage_client(self, rows=None, stream_count=0): + from google.cloud import bigquery_storage + if rows is None: rows = [] @@ -320,11 +312,9 @@ def test_fetchall_w_row(self): self.assertEqual(len(rows), 1) self.assertEqual(rows[0], (1,)) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_fetchall_w_bqstorage_client_fetch_success(self): 
+ pytest.importorskip("google.cloud.bigquery_storage") + pytest.importorskip("pyarrow") from google.cloud.bigquery import dbapi # use unordered data to also test any non-determenistic key order in dicts @@ -380,10 +370,8 @@ def test_fetchall_w_bqstorage_client_fetch_success(self): self.assertEqual(sorted_row_data, expected_row_data) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_fetchall_w_bqstorage_client_fetch_no_rows(self): + pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery import dbapi mock_client = self._mock_client( @@ -410,10 +398,8 @@ def test_fetchall_w_bqstorage_client_fetch_no_rows(self): # check the data returned self.assertEqual(rows, []) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_fetchall_w_bqstorage_client_fetch_error_no_fallback(self): + pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery import dbapi row_data = [bq_table.Row([1.1, 1.2], {"foo": 0, "bar": 1})] @@ -448,11 +434,10 @@ def fake_ensure_bqstorage_client(bqstorage_client=None, **kwargs): # the default client was not used mock_client.list_rows.assert_not_called() - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_fetchall_w_bqstorage_client_no_arrow_compression(self): + pytest.importorskip("google.cloud.bigquery_storage") + pytest.importorskip("pyarrow") + from google.cloud import bigquery_storage from google.cloud.bigquery import dbapi # Use unordered data to also test any non-determenistic key order in dicts. diff --git a/tests/unit/test_table.py b/tests/unit/test_table.py index a8107ee97..dbc5948b8 100644 --- a/tests/unit/test_table.py +++ b/tests/unit/test_table.py @@ -24,11 +24,6 @@ import pytest -try: - import importlib.metadata as metadata -except ImportError: - import importlib_metadata as metadata - import google.api_core.exceptions from test_utils.imports import maybe_fail_import @@ -37,48 +32,6 @@ from google.cloud.bigquery.table import TableReference from google.cloud.bigquery.dataset import DatasetReference -try: - from google.cloud import bigquery_storage - from google.cloud.bigquery_storage_v1.services.big_query_read.transports import ( - grpc as big_query_read_grpc_transport, - ) -except ImportError: # pragma: NO COVER - bigquery_storage = None - big_query_read_grpc_transport = None - - -pyarrow = _versions_helpers.PYARROW_VERSIONS.try_import() - -if pyarrow: # pragma: NO COVER - import pyarrow.types - -try: - import pandas -except (ImportError, AttributeError): # pragma: NO COVER - pandas = None - -try: - import db_dtypes # type: ignore -except ImportError: # pragma: NO COVER - db_dtypes = None - -try: - import geopandas -except (ImportError, AttributeError): # pragma: NO COVER - geopandas = None - -try: - import tqdm - from tqdm.std import TqdmDeprecationWarning - -except (ImportError, AttributeError): # pragma: NO COVER - tqdm = None - -if pandas is not None: - PANDAS_INSTALLED_VERSION = metadata.version("pandas") -else: - PANDAS_INSTALLED_VERSION = "0.0.0" - def _mock_client(): from google.cloud.bigquery import client @@ -1948,6 +1901,8 @@ def test_row(self): class Test_EmptyRowIterator(unittest.TestCase): + PYARROW_MINIMUM_VERSION = str(_versions_helpers._MIN_PYARROW_VERSION) + def _make_one(self): from google.cloud.bigquery.table import _EmptyRowIterator @@ -1963,15 +1918,17 @@ def 
test_to_arrow_error_if_pyarrow_is_none(self): with self.assertRaises(ValueError): row_iterator.to_arrow() - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_arrow(self): + pyarrow = pytest.importorskip("pyarrow") row_iterator = self._make_one() tbl = row_iterator.to_arrow() self.assertIsInstance(tbl, pyarrow.Table) self.assertEqual(tbl.num_rows, 0) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_arrow_iterable(self): + pyarrow = pytest.importorskip( + "pyarrow", minversion=self.PYARROW_MINIMUM_VERSION + ) row_iterator = self._make_one() arrow_iter = row_iterator.to_arrow_iterable() @@ -1989,8 +1946,8 @@ def test_to_dataframe_error_if_pandas_is_none(self): with self.assertRaises(ValueError): row_iterator.to_dataframe() - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe(self): + pandas = pytest.importorskip("pandas") row_iterator = self._make_one() df = row_iterator.to_dataframe(create_bqstorage_client=False) self.assertIsInstance(df, pandas.DataFrame) @@ -2002,8 +1959,8 @@ def test_to_dataframe_iterable_error_if_pandas_is_none(self): with self.assertRaises(ValueError): row_iterator.to_dataframe_iterable() - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_iterable(self): + pandas = pytest.importorskip("pandas") row_iterator = self._make_one() df_iter = row_iterator.to_dataframe_iterable() @@ -2027,8 +1984,8 @@ def test_to_geodataframe_if_geopandas_is_none(self): ): row_iterator.to_geodataframe(create_bqstorage_client=False) - @unittest.skipIf(geopandas is None, "Requires `geopandas`") def test_to_geodataframe(self): + geopandas = pytest.importorskip("geopandas") row_iterator = self._make_one() df = row_iterator.to_geodataframe(create_bqstorage_client=False) self.assertIsInstance(df, geopandas.GeoDataFrame) @@ -2040,6 +1997,8 @@ def test_to_geodataframe(self): class TestRowIterator(unittest.TestCase): + PYARROW_MINIMUM_VERSION = str(_versions_helpers._MIN_PYARROW_VERSION) + def _class_under_test(self): from google.cloud.bigquery.table import RowIterator @@ -2367,10 +2326,8 @@ def test__should_use_bqstorage_returns_false_when_completely_cached(self): ) ) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test__should_use_bqstorage_returns_true_if_no_cached_results(self): + pytest.importorskip("google.cloud.bigquery_storage") iterator = self._make_one(first_page_response=None) # not cached result = iterator._should_use_bqstorage( bqstorage_client=None, create_bqstorage_client=True @@ -2413,10 +2370,8 @@ def fail_bqstorage_import(name, globals, locals, fromlist, level): self.assertFalse(result) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test__should_use_bqstorage_returns_false_w_warning_if_obsolete_version(self): + pytest.importorskip("google.cloud.bigquery_storage") iterator = self._make_one(first_page_response=None) # not cached patcher = mock.patch( @@ -2435,8 +2390,10 @@ def test__should_use_bqstorage_returns_false_w_warning_if_obsolete_version(self) ] assert matching_warnings, "Obsolete dependency warning not raised." 
- @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_arrow_iterable(self): + pyarrow = pytest.importorskip( + "pyarrow", minversion=self.PYARROW_MINIMUM_VERSION + ) from google.cloud.bigquery.schema import SchemaField schema = [ @@ -2536,14 +2493,17 @@ def test_to_arrow_iterable(self): [[{"name": "Bepples Phlyntstone", "age": 0}, {"name": "Dino", "age": 4}]], ) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_to_arrow_iterable_w_bqstorage(self): + pyarrow = pytest.importorskip("pyarrow") + pytest.importorskip("google.cloud.bigquery_storage") + from google.cloud import bigquery_storage + from google.cloud.bigquery_storage_v1 import reader + from google.cloud.bigquery_storage_v1.services.big_query_read.transports import ( + grpc as big_query_read_grpc_transport, + ) + from google.cloud.bigquery import schema from google.cloud.bigquery import table as mut - from google.cloud.bigquery_storage_v1 import reader bqstorage_client = mock.create_autospec(bigquery_storage.BigQueryReadClient) bqstorage_client._transport = mock.create_autospec( @@ -2615,8 +2575,10 @@ def test_to_arrow_iterable_w_bqstorage(self): # Don't close the client if it was passed in. bqstorage_client._transport.grpc_channel.close.assert_not_called() - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_arrow(self): + pyarrow = pytest.importorskip( + "pyarrow", minversion=self.PYARROW_MINIMUM_VERSION + ) from google.cloud.bigquery.schema import SchemaField schema = [ @@ -2697,8 +2659,11 @@ def test_to_arrow(self): ], ) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_arrow_w_nulls(self): + pyarrow = pytest.importorskip( + "pyarrow", minversion=self.PYARROW_MINIMUM_VERSION + ) + import pyarrow.types from google.cloud.bigquery.schema import SchemaField schema = [SchemaField("name", "STRING"), SchemaField("age", "INTEGER")] @@ -2730,8 +2695,10 @@ def test_to_arrow_w_nulls(self): self.assertEqual(names, ["Donkey", "Diddy", "Dixie", None]) self.assertEqual(ages, [32, 29, None, 111]) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_arrow_w_unknown_type(self): + pyarrow = pytest.importorskip( + "pyarrow", minversion=self.PYARROW_MINIMUM_VERSION + ) from google.cloud.bigquery.schema import SchemaField schema = [ @@ -2773,8 +2740,10 @@ def test_to_arrow_w_unknown_type(self): warning = warned[0] self.assertTrue("sport" in str(warning)) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_arrow_w_empty_table(self): + pyarrow = pytest.importorskip( + "pyarrow", minversion=self.PYARROW_MINIMUM_VERSION + ) from google.cloud.bigquery.schema import SchemaField schema = [ @@ -2812,11 +2781,9 @@ def test_to_arrow_w_empty_table(self): self.assertEqual(child_field.type.value_type[0].name, "name") self.assertEqual(child_field.type.value_type[1].name, "age") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_to_arrow_max_results_w_explicit_bqstorage_client_warning(self): + pytest.importorskip("pyarrow") + pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery.schema import SchemaField schema = [ @@ -2856,11 +2823,9 @@ def test_to_arrow_max_results_w_explicit_bqstorage_client_warning(self): ) mock_client._ensure_bqstorage_client.assert_not_called() - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - 
@unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_to_arrow_max_results_w_create_bqstorage_client_no_warning(self): + pytest.importorskip("pyarrow") + pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery.schema import SchemaField schema = [ @@ -2896,14 +2861,16 @@ def test_to_arrow_max_results_w_create_bqstorage_client_no_warning(self): self.assertFalse(matches) mock_client._ensure_bqstorage_client.assert_not_called() - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_to_arrow_w_bqstorage(self): + pyarrow = pytest.importorskip("pyarrow") + pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery import schema from google.cloud.bigquery import table as mut + from google.cloud import bigquery_storage from google.cloud.bigquery_storage_v1 import reader + from google.cloud.bigquery_storage_v1.services.big_query_read.transports import ( + grpc as big_query_read_grpc_transport, + ) bqstorage_client = mock.create_autospec(bigquery_storage.BigQueryReadClient) bqstorage_client._transport = mock.create_autospec( @@ -2977,13 +2944,15 @@ def test_to_arrow_w_bqstorage(self): # Don't close the client if it was passed in. bqstorage_client._transport.grpc_channel.close.assert_not_called() - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_to_arrow_w_bqstorage_creates_client(self): + pytest.importorskip("pyarrow") + pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery import schema from google.cloud.bigquery import table as mut + from google.cloud import bigquery_storage + from google.cloud.bigquery_storage_v1.services.big_query_read.transports import ( + grpc as big_query_read_grpc_transport, + ) mock_client = _mock_client() bqstorage_client = mock.create_autospec(bigquery_storage.BigQueryReadClient) @@ -3008,8 +2977,10 @@ def test_to_arrow_w_bqstorage_creates_client(self): mock_client._ensure_bqstorage_client.assert_called_once() bqstorage_client._transport.grpc_channel.close.assert_called_once() - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_arrow_ensure_bqstorage_client_wo_bqstorage(self): + pyarrow = pytest.importorskip( + "pyarrow", minversion=self.PYARROW_MINIMUM_VERSION + ) from google.cloud.bigquery.schema import SchemaField schema = [ @@ -3039,13 +3010,12 @@ def mock_verify_version(raise_if_error: bool = False): self.assertIsInstance(tbl, pyarrow.Table) self.assertEqual(tbl.num_rows, 2) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_to_arrow_w_bqstorage_no_streams(self): + pyarrow = pytest.importorskip("pyarrow") + pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery import schema from google.cloud.bigquery import table as mut + from google.cloud import bigquery_storage bqstorage_client = mock.create_autospec(bigquery_storage.BigQueryReadClient) session = bigquery_storage.types.ReadSession() @@ -3079,12 +3049,10 @@ def test_to_arrow_w_bqstorage_no_streams(self): self.assertEqual(actual_table.schema[1].name, "colC") self.assertEqual(actual_table.schema[2].name, "colB") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - @unittest.skipIf(tqdm is None, "Requires `tqdm`") - 
@mock.patch("tqdm.tqdm_gui") - @mock.patch("tqdm.notebook.tqdm") - @mock.patch("tqdm.tqdm") - def test_to_arrow_progress_bar(self, tqdm_mock, tqdm_notebook_mock, tqdm_gui_mock): + def test_to_arrow_progress_bar(self): + pytest.importorskip("pyarrow") + pytest.importorskip("tqdm") + pytest.importorskip("tqdm.notebook") from google.cloud.bigquery.schema import SchemaField schema = [ @@ -3101,12 +3069,13 @@ def test_to_arrow_progress_bar(self, tqdm_mock, tqdm_notebook_mock, tqdm_gui_moc api_request = mock.Mock(return_value={"rows": rows}) progress_bars = ( - ("tqdm", tqdm_mock), - ("tqdm_notebook", tqdm_notebook_mock), - ("tqdm_gui", tqdm_gui_mock), + ("tqdm", mock.patch("tqdm.tqdm")), + ("tqdm_notebook", mock.patch("tqdm.notebook.tqdm")), + ("tqdm_gui", mock.patch("tqdm.tqdm_gui")), ) - for progress_bar_type, progress_bar_mock in progress_bars: + for progress_bar_type, bar_patch in progress_bars: + progress_bar_mock = bar_patch.start() row_iterator = self._make_one(_mock_client(), api_request, path, schema) tbl = row_iterator.to_arrow( progress_bar_type=progress_bar_type, @@ -3129,8 +3098,8 @@ def test_to_arrow_w_pyarrow_none(self): with self.assertRaises(ValueError): row_iterator.to_arrow() - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_iterable(self): + pandas = pytest.importorskip("pandas") from google.cloud.bigquery.schema import SchemaField schema = [ @@ -3171,8 +3140,8 @@ def test_to_dataframe_iterable(self): self.assertEqual(df_2["name"][0], "Sven") self.assertEqual(df_2["age"][0], 33) - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_iterable_with_dtypes(self): + pandas = pytest.importorskip("pandas") from google.cloud.bigquery.schema import SchemaField schema = [ @@ -3213,15 +3182,17 @@ def test_to_dataframe_iterable_with_dtypes(self): self.assertEqual(df_2["name"][0], "Sven") self.assertEqual(df_2["age"][0], 33) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_dataframe_iterable_w_bqstorage(self): + pandas = pytest.importorskip("pandas") + pyarrow = pytest.importorskip("pyarrow") + pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery import schema from google.cloud.bigquery import table as mut + from google.cloud import bigquery_storage from google.cloud.bigquery_storage_v1 import reader + from google.cloud.bigquery_storage_v1.services.big_query_read.transports import ( + grpc as big_query_read_grpc_transport, + ) arrow_fields = [ pyarrow.field("colA", pyarrow.int64()), @@ -3285,13 +3256,12 @@ def test_to_dataframe_iterable_w_bqstorage(self): # Don't close the client if it was passed in. 
bqstorage_client._transport.grpc_channel.close.assert_not_called() - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_iterable_w_bqstorage_max_results_warning(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery import schema from google.cloud.bigquery import table as mut + from google.cloud import bigquery_storage bqstorage_client = mock.create_autospec(bigquery_storage.BigQueryReadClient) @@ -3358,8 +3328,8 @@ def test_to_dataframe_iterable_error_if_pandas_is_none(self): with pytest.raises(ValueError, match="pandas"): row_iterator.to_dataframe_iterable() - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe(self): + pandas = pytest.importorskip("pandas") from google.cloud.bigquery.schema import SchemaField schema = [ @@ -3384,9 +3354,9 @@ def test_to_dataframe(self): self.assertEqual(df.name.dtype.name, "object") self.assertEqual(df.age.dtype.name, "Int64") - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_dataframe_timestamp_out_of_pyarrow_bounds(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.schema import SchemaField schema = [SchemaField("some_timestamp", "TIMESTAMP")] @@ -3412,9 +3382,9 @@ def test_to_dataframe_timestamp_out_of_pyarrow_bounds(self): ], ) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_dataframe_datetime_out_of_pyarrow_bounds(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.schema import SchemaField schema = [SchemaField("some_datetime", "DATETIME")] @@ -3436,14 +3406,10 @@ def test_to_dataframe_datetime_out_of_pyarrow_bounds(self): [datetime.datetime(4567, 1, 1), datetime.datetime(9999, 12, 31)], ) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(tqdm is None, "Requires `tqdm`") - @mock.patch("tqdm.tqdm_gui") - @mock.patch("tqdm.notebook.tqdm") - @mock.patch("tqdm.tqdm") - def test_to_dataframe_progress_bar( - self, tqdm_mock, tqdm_notebook_mock, tqdm_gui_mock - ): + def test_to_dataframe_progress_bar(self): + pytest.importorskip("pandas") + pytest.importorskip("tqdm") + from google.cloud.bigquery.schema import SchemaField schema = [ @@ -3460,12 +3426,13 @@ def test_to_dataframe_progress_bar( api_request = mock.Mock(return_value={"rows": rows}) progress_bars = ( - ("tqdm", tqdm_mock), - ("tqdm_notebook", tqdm_notebook_mock), - ("tqdm_gui", tqdm_gui_mock), + ("tqdm", mock.patch("tqdm.tqdm")), + ("tqdm_notebook", mock.patch("tqdm.notebook.tqdm")), + ("tqdm_gui", mock.patch("tqdm.tqdm_gui")), ) - for progress_bar_type, progress_bar_mock in progress_bars: + for progress_bar_type, bar_patch in progress_bars: + progress_bar_mock = bar_patch.start() row_iterator = self._make_one(_mock_client(), api_request, path, schema) df = row_iterator.to_dataframe( progress_bar_type=progress_bar_type, @@ -3477,9 +3444,9 @@ def test_to_dataframe_progress_bar( progress_bar_mock().close.assert_called_once() self.assertEqual(len(df), 4) - @unittest.skipIf(pandas is None, "Requires `pandas`") @mock.patch("google.cloud.bigquery._tqdm_helpers.tqdm", new=None) def test_to_dataframe_no_tqdm_no_progress_bar(self): + pytest.importorskip("pandas") from google.cloud.bigquery.schema 
import SchemaField schema = [ @@ -3505,9 +3472,9 @@ def test_to_dataframe_no_tqdm_no_progress_bar(self): self.assertEqual(len(user_warnings), 0) self.assertEqual(len(df), 4) - @unittest.skipIf(pandas is None, "Requires `pandas`") @mock.patch("google.cloud.bigquery._tqdm_helpers.tqdm", new=None) def test_to_dataframe_no_tqdm(self): + pytest.importorskip("pandas") from google.cloud.bigquery.schema import SchemaField schema = [ @@ -3539,12 +3506,12 @@ def test_to_dataframe_no_tqdm(self): # should still work. self.assertEqual(len(df), 4) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(tqdm is None, "Requires `tqdm`") - @mock.patch("tqdm.tqdm_gui", new=None) # will raise TypeError on call - @mock.patch("tqdm.notebook.tqdm", new=None) # will raise TypeError on call - @mock.patch("tqdm.tqdm", new=None) # will raise TypeError on call def test_to_dataframe_tqdm_error(self): + pytest.importorskip("pandas") + pytest.importorskip("tqdm") + mock.patch("tqdm.tqdm_gui", new=None) + mock.patch("tqdm.notebook.tqdm", new=None) + mock.patch("tqdm.tqdm", new=None) from google.cloud.bigquery.schema import SchemaField schema = [ @@ -3573,14 +3540,14 @@ def test_to_dataframe_tqdm_error(self): # Warn that a progress bar was requested, but creating the tqdm # progress bar failed. - for warning in warned: + for warning in warned: # pragma: NO COVER self.assertIn( warning.category, - [UserWarning, DeprecationWarning, TqdmDeprecationWarning], + [UserWarning, DeprecationWarning], ) - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_w_empty_results(self): + pandas = pytest.importorskip("pandas") from google.cloud.bigquery.schema import SchemaField schema = [ @@ -3596,8 +3563,8 @@ def test_to_dataframe_w_empty_results(self): self.assertEqual(len(df), 0) # verify the number of rows self.assertEqual(list(df), ["name", "age"]) # verify the column names - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_w_various_types_nullable(self): + pandas = pytest.importorskip("pandas") import datetime from google.cloud.bigquery.schema import SchemaField @@ -3637,8 +3604,9 @@ def test_to_dataframe_w_various_types_nullable(self): self.assertIsInstance(row.complete, bool) self.assertIsInstance(row.date, datetime.date) - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_w_dtypes_mapper(self): + pandas = pytest.importorskip("pandas") + pyarrow = pytest.importorskip("pyarrow") from google.cloud.bigquery.schema import SchemaField schema = [ @@ -3832,9 +3800,11 @@ def test_to_dataframe_w_dtypes_mapper(self): ) self.assertEqual(df.timestamp.dtype.name, "object") - @unittest.skipIf(pandas is None, "Requires `pandas`") - @pytest.mark.skipif(PANDAS_INSTALLED_VERSION[0:2] not in ["0.", "1."], reason="") def test_to_dataframe_w_none_dtypes_mapper(self): + pandas = pytest.importorskip("pandas") + pandas_major_version = pandas.__version__[0:2] + if pandas_major_version not in ["0.", "1."]: + pytest.skip(reason="Requires a version of pandas less than 2.0") from google.cloud.bigquery.schema import SchemaField schema = [ @@ -3888,8 +3858,8 @@ def test_to_dataframe_w_none_dtypes_mapper(self): self.assertEqual(df.time.dtype.name, "object") self.assertEqual(df.timestamp.dtype.name, "datetime64[ns, UTC]") - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_w_unsupported_dtypes_mapper(self): + pytest.importorskip("pandas") import numpy from google.cloud.bigquery.schema import SchemaField @@ -3945,9 +3915,11 @@ def 
test_to_dataframe_w_unsupported_dtypes_mapper(self): timestamp_dtype=numpy.dtype("datetime64[us]"), ) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @pytest.mark.skipif(PANDAS_INSTALLED_VERSION[0:2] not in ["0.", "1."], reason="") def test_to_dataframe_column_dtypes(self): + pandas = pytest.importorskip("pandas") + pandas_major_version = pandas.__version__[0:2] + if pandas_major_version not in ["0.", "1."]: + pytest.skip("Requires a version of pandas less than 2.0") from google.cloud.bigquery.schema import SchemaField schema = [ @@ -3960,9 +3932,9 @@ def test_to_dataframe_column_dtypes(self): SchemaField("date", "DATE"), ] row_data = [ - ["1433836800000000", "420", "1.1", "1.77", "Cash", "true", "1999-12-01"], + ["1433836800000", "420", "1.1", "1.77", "Cash", "true", "1999-12-01"], [ - "1387811700000000", + "1387811700000", "2580", "17.7", "28.5", @@ -3970,7 +3942,7 @@ def test_to_dataframe_column_dtypes(self): "false", "1953-06-14", ], - ["1385565300000000", "2280", "4.4", "7.1", "Credit", "true", "1981-11-04"], + ["1385565300000", "2280", "4.4", "7.1", "Credit", "true", "1981-11-04"], ] rows = [{"f": [{"v": field} for field in row]} for row in row_data] path = "/foo" @@ -3995,13 +3967,12 @@ def test_to_dataframe_column_dtypes(self): self.assertEqual(df.complete.dtype.name, "boolean") self.assertEqual(df.date.dtype.name, "dbdate") - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_datetime_objects(self): # When converting date or timestamp values to nanosecond # precision, the result can be out of pyarrow bounds. To avoid # the error when converting to Pandas, we use object type if # necessary. - + pandas = pytest.importorskip("pandas") from google.cloud.bigquery.schema import SchemaField schema = [ @@ -4044,9 +4015,10 @@ def test_to_dataframe_error_if_pandas_is_none(self): with self.assertRaises(ValueError): row_iterator.to_dataframe() - @unittest.skipIf(pandas is None, "Requires `pandas`") @mock.patch("google.cloud.bigquery.table.shapely", new=None) def test_to_dataframe_error_if_shapely_is_none(self): + pytest.importorskip("pandas") + with self.assertRaisesRegex( ValueError, re.escape( @@ -4056,8 +4028,9 @@ def test_to_dataframe_error_if_shapely_is_none(self): ): self._make_one_from_data().to_dataframe(geography_as_object=True) - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_max_results_w_bqstorage_warning(self): + pytest.importorskip("pandas") + from google.cloud.bigquery.schema import SchemaField schema = [ @@ -4092,8 +4065,8 @@ def test_to_dataframe_max_results_w_bqstorage_warning(self): ] self.assertEqual(len(matches), 1, msg="User warning was not emitted.") - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_max_results_w_explicit_bqstorage_client_warning(self): + pytest.importorskip("pandas") from google.cloud.bigquery.schema import SchemaField schema = [ @@ -4133,8 +4106,8 @@ def test_to_dataframe_max_results_w_explicit_bqstorage_client_warning(self): ) mock_client._ensure_bqstorage_client.assert_not_called() - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_max_results_w_create_bqstorage_client_no_warning(self): + pytest.importorskip("pandas") from google.cloud.bigquery.schema import SchemaField schema = [ @@ -4170,13 +4143,15 @@ def test_to_dataframe_max_results_w_create_bqstorage_client_no_warning(self): self.assertFalse(matches) mock_client._ensure_bqstorage_client.assert_not_called() - @unittest.skipIf(pandas is None, "Requires `pandas`") - 
@unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_to_dataframe_w_bqstorage_creates_client(self): + pytest.importorskip("pandas") + pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery import schema from google.cloud.bigquery import table as mut + from google.cloud import bigquery_storage + from google.cloud.bigquery_storage_v1.services.big_query_read.transports import ( + grpc as big_query_read_grpc_transport, + ) mock_client = _mock_client() bqstorage_client = mock.create_autospec(bigquery_storage.BigQueryReadClient) @@ -4201,13 +4176,12 @@ def test_to_dataframe_w_bqstorage_creates_client(self): mock_client._ensure_bqstorage_client.assert_called_once() bqstorage_client._transport.grpc_channel.close.assert_called_once() - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_to_dataframe_w_bqstorage_no_streams(self): + pytest.importorskip("pandas") + pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery import schema from google.cloud.bigquery import table as mut + from google.cloud import bigquery_storage bqstorage_client = mock.create_autospec(bigquery_storage.BigQueryReadClient) session = bigquery_storage.types.ReadSession() @@ -4230,13 +4204,12 @@ def test_to_dataframe_w_bqstorage_no_streams(self): self.assertEqual(list(got), column_names) self.assertTrue(got.empty) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_dataframe_w_bqstorage_logs_session(self): + pytest.importorskip("google.cloud.bigquery_storage") + pytest.importorskip("pandas") + pytest.importorskip("pyarrow") from google.cloud.bigquery.table import Table + from google.cloud import bigquery_storage bqstorage_client = mock.create_autospec(bigquery_storage.BigQueryReadClient) session = bigquery_storage.types.ReadSession() @@ -4255,12 +4228,11 @@ def test_to_dataframe_w_bqstorage_logs_session(self): "with BQ Storage API session 'projects/test-proj/locations/us/sessions/SOMESESSION'." 
) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_dataframe_w_bqstorage_empty_streams(self): + pytest.importorskip("google.cloud.bigquery_storage") + pytest.importorskip("pandas") + pyarrow = pytest.importorskip("pyarrow") + from google.cloud import bigquery_storage from google.cloud.bigquery import schema from google.cloud.bigquery import table as mut from google.cloud.bigquery_storage_v1 import reader @@ -4310,15 +4282,17 @@ def test_to_dataframe_w_bqstorage_empty_streams(self): self.assertEqual(list(got), column_names) self.assertTrue(got.empty) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_dataframe_w_bqstorage_nonempty(self): + pytest.importorskip("google.cloud.bigquery_storage") + pytest.importorskip("pandas") + pyarrow = pytest.importorskip("pyarrow") from google.cloud.bigquery import schema from google.cloud.bigquery import table as mut + from google.cloud import bigquery_storage from google.cloud.bigquery_storage_v1 import reader + from google.cloud.bigquery_storage_v1.services.big_query_read.transports import ( + grpc as big_query_read_grpc_transport, + ) arrow_fields = [ pyarrow.field("colA", pyarrow.int64()), @@ -4390,12 +4364,10 @@ def test_to_dataframe_w_bqstorage_nonempty(self): # Don't close the client if it was passed in. bqstorage_client._transport.grpc_channel.close.assert_not_called() - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_dataframe_w_bqstorage_multiple_streams_return_unique_index(self): + bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage") + pytest.importorskip("pandas") + pyarrow = pytest.importorskip("pyarrow") from google.cloud.bigquery import schema from google.cloud.bigquery import table as mut from google.cloud.bigquery_storage_v1 import reader @@ -4444,14 +4416,11 @@ def test_to_dataframe_w_bqstorage_multiple_streams_return_unique_index(self): self.assertEqual(len(got.index), total_rows) self.assertTrue(got.index.is_unique) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - @unittest.skipIf(tqdm is None, "Requires `tqdm`") - @mock.patch("tqdm.tqdm") - def test_to_dataframe_w_bqstorage_updates_progress_bar(self, tqdm_mock): + def test_to_dataframe_w_bqstorage_updates_progress_bar(self): + bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage") + pytest.importorskip("pandas") + pyarrow = pytest.importorskip("pyarrow") + pytest.importorskip("tqdm") from google.cloud.bigquery import schema from google.cloud.bigquery import table as mut from google.cloud.bigquery_storage_v1 import reader @@ -4507,28 +4476,27 @@ def blocking_to_arrow(*args, **kwargs): selected_fields=schema, ) - row_iterator.to_dataframe( - bqstorage_client=bqstorage_client, progress_bar_type="tqdm" - ) + with mock.patch("tqdm.tqdm") as tqdm_mock: + row_iterator.to_dataframe( + bqstorage_client=bqstorage_client, progress_bar_type="tqdm" + ) + + # Make sure that this test updated the 
progress bar once per page from + # each stream. + total_pages = len(streams) * len(mock_pages) + expected_total_rows = total_pages * len(page_items) + progress_updates = [ + args[0] for args, kwargs in tqdm_mock().update.call_args_list + ] + # Should have sent >1 update due to delay in blocking_to_arrow. + self.assertGreater(len(progress_updates), 1) + self.assertEqual(sum(progress_updates), expected_total_rows) + tqdm_mock().close.assert_called_once() - # Make sure that this test updated the progress bar once per page from - # each stream. - total_pages = len(streams) * len(mock_pages) - expected_total_rows = total_pages * len(page_items) - progress_updates = [ - args[0] for args, kwargs in tqdm_mock().update.call_args_list - ] - # Should have sent >1 update due to delay in blocking_to_arrow. - self.assertGreater(len(progress_updates), 1) - self.assertEqual(sum(progress_updates), expected_total_rows) - tqdm_mock().close.assert_called_once() - - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_dataframe_w_bqstorage_exits_on_keyboardinterrupt(self): + bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage") + pytest.importorskip("pandas") + pyarrow = pytest.importorskip("pyarrow") from google.cloud.bigquery import schema from google.cloud.bigquery import table as mut from google.cloud.bigquery_storage_v1 import reader @@ -4611,8 +4579,8 @@ def blocking_to_arrow(*args, **kwargs): # should have been set. self.assertLessEqual(mock_page.to_dataframe.call_count, 2) - @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_tabledata_list_w_multiple_pages_return_unique_index(self): + pandas = pytest.importorskip("pandas") from google.cloud.bigquery import schema from google.cloud.bigquery import table as mut @@ -4643,11 +4611,10 @@ def test_to_dataframe_tabledata_list_w_multiple_pages_return_unique_index(self): self.assertEqual(df.name.dtype.name, "object") self.assertTrue(df.index.is_unique) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_to_dataframe_w_bqstorage_raises_auth_error(self): + pytest.importorskip("google.cloud.bigquery_storage") + pytest.importorskip("pandas") + from google.cloud import bigquery_storage from google.cloud.bigquery import table as mut bqstorage_client = mock.create_autospec(bigquery_storage.BigQueryReadClient) @@ -4665,10 +4632,8 @@ def test_to_dataframe_w_bqstorage_raises_auth_error(self): with pytest.raises(google.api_core.exceptions.Forbidden): row_iterator.to_dataframe(bqstorage_client=bqstorage_client) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_to_dataframe_w_bqstorage_partition(self): + bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery import schema from google.cloud.bigquery import table as mut @@ -4685,10 +4650,8 @@ def test_to_dataframe_w_bqstorage_partition(self): with pytest.raises(ValueError): row_iterator.to_dataframe(bqstorage_client) - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) def test_to_dataframe_w_bqstorage_snapshot(self): + bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery import schema from google.cloud.bigquery import table as mut 
@@ -4705,15 +4668,17 @@ def test_to_dataframe_w_bqstorage_snapshot(self): with pytest.raises(ValueError): row_iterator.to_dataframe(bqstorage_client) - @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf( - bigquery_storage is None, "Requires `google-cloud-bigquery-storage`" - ) - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_dataframe_concat_categorical_dtype_w_pyarrow(self): + pytest.importorskip("google.cloud.bigquery_storage") + pandas = pytest.importorskip("pandas") + pyarrow = pytest.importorskip("pyarrow") + from google.cloud import bigquery_storage from google.cloud.bigquery import schema from google.cloud.bigquery import table as mut from google.cloud.bigquery_storage_v1 import reader + from google.cloud.bigquery_storage_v1.services.big_query_read.transports import ( + grpc as big_query_read_grpc_transport, + ) arrow_fields = [ # Not alphabetical to test column order. @@ -4818,8 +4783,9 @@ def test_to_dataframe_concat_categorical_dtype_w_pyarrow(self): # Don't close the client if it was passed in. bqstorage_client._transport.grpc_channel.close.assert_not_called() - @unittest.skipIf(geopandas is None, "Requires `geopandas`") def test_to_dataframe_geography_as_object(self): + pandas = pytest.importorskip("pandas") + pytest.importorskip("geopandas") row_iterator = self._make_one_from_data( (("name", "STRING"), ("geog", "GEOGRAPHY")), ( @@ -4853,8 +4819,8 @@ def test_to_geodataframe_error_if_geopandas_is_none(self): ): self._make_one_from_data().to_geodataframe() - @unittest.skipIf(geopandas is None, "Requires `geopandas`") def test_to_geodataframe(self): + geopandas = pytest.importorskip("geopandas") row_iterator = self._make_one_from_data( (("name", "STRING"), ("geog", "GEOGRAPHY")), ( @@ -4883,8 +4849,8 @@ def test_to_geodataframe(self): self.assertEqual(df.geog.crs.srs, "EPSG:4326") self.assertEqual(df.geog.crs.name, "WGS 84") - @unittest.skipIf(geopandas is None, "Requires `geopandas`") def test_to_geodataframe_ambiguous_geog(self): + pytest.importorskip("geopandas") row_iterator = self._make_one_from_data( (("name", "STRING"), ("geog", "GEOGRAPHY"), ("geog2", "GEOGRAPHY")), () ) @@ -4898,8 +4864,8 @@ def test_to_geodataframe_ambiguous_geog(self): ): row_iterator.to_geodataframe(create_bqstorage_client=False) - @unittest.skipIf(geopandas is None, "Requires `geopandas`") def test_to_geodataframe_bad_geography_column(self): + pytest.importorskip("geopandas") row_iterator = self._make_one_from_data( (("name", "STRING"), ("geog", "GEOGRAPHY"), ("geog2", "GEOGRAPHY")), () ) @@ -4914,8 +4880,8 @@ def test_to_geodataframe_bad_geography_column(self): create_bqstorage_client=False, geography_column="xxx" ) - @unittest.skipIf(geopandas is None, "Requires `geopandas`") def test_to_geodataframe_no_geog(self): + pytest.importorskip("geopandas") row_iterator = self._make_one_from_data( (("name", "STRING"), ("geog", "STRING")), () ) @@ -4928,8 +4894,9 @@ def test_to_geodataframe_no_geog(self): ): row_iterator.to_geodataframe(create_bqstorage_client=False) - @unittest.skipIf(geopandas is None, "Requires `geopandas`") def test_to_geodataframe_w_geography_column(self): + geopandas = pytest.importorskip("geopandas") + pandas = pytest.importorskip("pandas") row_iterator = self._make_one_from_data( (("name", "STRING"), ("geog", "GEOGRAPHY"), ("geog2", "GEOGRAPHY")), ( @@ -4974,7 +4941,6 @@ def test_to_geodataframe_w_geography_column(self): ["0.0", "0.0", "0.0"], ) - @unittest.skipIf(geopandas is None, "Requires `geopandas`") 
@mock.patch("google.cloud.bigquery.table.RowIterator.to_dataframe") def test_rowiterator_to_geodataframe_delegation(self, to_dataframe): """ @@ -4983,6 +4949,8 @@ def test_rowiterator_to_geodataframe_delegation(self, to_dataframe): This test just demonstrates that. We don't need to test all the variations, which are tested for to_dataframe. """ + pandas = pytest.importorskip("pandas") + geopandas = pytest.importorskip("geopandas") import numpy from shapely import wkt @@ -5676,9 +5644,6 @@ def test_from_api_repr_only_foreign_keys_resource(self): self.assertIsNotNone(instance.foreign_keys) -@pytest.mark.skipif( - bigquery_storage is None, reason="Requires `google-cloud-bigquery-storage`" -) @pytest.mark.parametrize( "table_path", ( @@ -5689,6 +5654,7 @@ def test_from_api_repr_only_foreign_keys_resource(self): ), ) def test_table_reference_to_bqstorage_v1_stable(table_path): + pytest.importorskip("google.cloud.bigquery_storage") from google.cloud.bigquery import table as mut expected = "projects/my-project/datasets/my_dataset/tables/my_table" From 1e71bc87263c05cd153e96a3559d688ba0fe3825 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 27 Mar 2024 18:26:43 +0100 Subject: [PATCH 14/15] chore(deps): update all dependencies (#1873) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Update samples/geography/requirements.txt * Update samples/geography/requirements.txt * Update samples/geography/requirements.txt --------- Co-authored-by: Owl Bot Co-authored-by: Chalmer Lowe --- samples/geography/requirements.txt | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/samples/geography/requirements.txt b/samples/geography/requirements.txt index 6fa7ffc7e..bdaead5b1 100644 --- a/samples/geography/requirements.txt +++ b/samples/geography/requirements.txt @@ -32,8 +32,10 @@ pandas==2.2.1; python_version >= '3.9' proto-plus==1.23.0 pyarrow==12.0.1; python_version == '3.7' pyarrow==15.0.2; python_version >= '3.8' -pyasn1==0.5.1 -pyasn1-modules==0.3.0 +pyasn1==0.5.1; python_version == '3.7' +pyasn1==0.6.0; python_version >= '3.8' +pyasn1-modules==0.3.0; python_version == '3.7' +pyasn1-modules==0.4.0; python_version >= '3.8' pycparser==2.21 pyparsing==3.1.2 python-dateutil==2.9.0.post0 From 7dfee0c585d2a3781ffc6e769c7c8bbe4dbe9714 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 27 Mar 2024 14:39:38 -0700 Subject: [PATCH 15/15] chore(main): release 3.20.0 (#1850) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Tim Sweña (Swast) --- CHANGELOG.md | 15 +++++++++++++++ google/cloud/bigquery/version.py | 2 +- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4cb0e1d20..578df101f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,21 @@ [1]: https://pypi.org/project/google-cloud-bigquery/#history +## [3.20.0](https://github.com/googleapis/python-bigquery/compare/v3.19.0...v3.20.0) (2024-03-27) + + +### Features + +* Add `fields` parameter to `set_iam_policy` for consistency with update methods ([#1872](https://github.com/googleapis/python-bigquery/issues/1872)) 
([08b1e6f](https://github.com/googleapis/python-bigquery/commit/08b1e6f9c41121907c345daedbae40ece18e8b6a)) + + +### Bug Fixes + +* Correct type checking ([#1848](https://github.com/googleapis/python-bigquery/issues/1848)) ([2660dbd](https://github.com/googleapis/python-bigquery/commit/2660dbd4821a89a1e20e3e1541504a409f1979aa)) +* Update error logging when converting to pyarrow column fails ([#1836](https://github.com/googleapis/python-bigquery/issues/1836)) ([0ac6e9b](https://github.com/googleapis/python-bigquery/commit/0ac6e9bf186945832f5dcdf5a4d95667b4da223e)) +* Updates a number of optional dependencies ([#1864](https://github.com/googleapis/python-bigquery/issues/1864)) ([c2496a1](https://github.com/googleapis/python-bigquery/commit/c2496a1014a7d99e805b3d0a66e4517165bd7e01)) +* Use an allowlist instead of denylist to determine when `query_and_wait` uses `jobs.query` API ([#1869](https://github.com/googleapis/python-bigquery/issues/1869)) ([e265db6](https://github.com/googleapis/python-bigquery/commit/e265db6a6a37d13056dcaac240c2cf3975dfd644)) + ## [3.19.0](https://github.com/googleapis/python-bigquery/compare/v3.18.0...v3.19.0) (2024-03-11) diff --git a/google/cloud/bigquery/version.py b/google/cloud/bigquery/version.py index 27f24bd19..4537b8250 100644 --- a/google/cloud/bigquery/version.py +++ b/google/cloud/bigquery/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "3.19.0" +__version__ = "3.20.0"
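A minimal sketch of the runtime-skip pattern that the test changes above converge on: `pytest.importorskip("modname")` imports the named module and skips the test when the import fails, and it returns the module, so it can stand in for a module-level import that was previously guarded by `unittest.skipIf(dep is None, ...)`. The test name and data below are hypothetical and are not part of any patch in this series:

import pytest


def test_roundtrip_with_optional_dependencies():
    # Skip this test at runtime when the optional dependency is missing;
    # the return value is the imported module, so no module-level
    # sentinel import (e.g. `pandas = None`) is needed.
    pandas = pytest.importorskip("pandas")
    # When only presence matters, the return value can be ignored.
    pytest.importorskip("pyarrow")

    df = pandas.DataFrame({"col": [1, 2, 3]})
    assert list(df["col"]) == [1, 2, 3]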