diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index 4c0027f..3f7634f 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -13,5 +13,5 @@
 # limitations under the License.
 docker:
   image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
-  digest: sha256:04c35dc5f49f0f503a306397d6d043685f8d2bb822ab515818c4208d7fb2db3a
-# created: 2025-01-16T15:24:11.364245182Z
+  digest: sha256:f016446d6e520e5fb552c45b110cba3f217bffdd3d06bdddd076e9e6d13266cf
+# created: 2025-02-21T19:32:52.01306189Z
diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml
index 2386b3a..d919b44 100644
--- a/.github/workflows/unittest.yml
+++ b/.github/workflows/unittest.yml
@@ -5,7 +5,10 @@ on:
 name: unittest
 jobs:
   unit:
-    runs-on: ubuntu-latest
+    # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed.
+    # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix
+    # https://docs.github.com/en/actions/using-github-hosted-runners/using-github-hosted-runners/about-github-hosted-runners#standard-github-hosted-runners-for-public-repositories
+    runs-on: ubuntu-22.04
     strategy:
       matrix:
         python: ['3.8', '3.9', '3.10', '3.11', '3.12']
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
index 2731d6f..d41b45a 100755
--- a/.kokoro/build.sh
+++ b/.kokoro/build.sh
@@ -15,11 +15,13 @@
 
 set -eo pipefail
 
+CURRENT_DIR=$(dirname "${BASH_SOURCE[0]}")
+
 if [[ -z "${PROJECT_ROOT:-}" ]]; then
-    PROJECT_ROOT="github/python-db-dtypes-pandas"
+    PROJECT_ROOT=$(realpath "${CURRENT_DIR}/..")
 fi
 
-cd "${PROJECT_ROOT}"
+pushd "${PROJECT_ROOT}"
 
 # Disable buffering, so that the logs stream through.
 export PYTHONUNBUFFERED=1
@@ -28,10 +30,16 @@ export PYTHONUNBUFFERED=1
 env | grep KOKORO
 
 # Setup service account credentials.
-export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
+if [[ -f "${KOKORO_GFILE_DIR}/service-account.json" ]]
+then
+    export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
+fi
 
 # Setup project id.
-export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
+if [[ -f "${KOKORO_GFILE_DIR}/project-id.json" ]]
+then
+    export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
+fi
 
 # If this is a continuous build, send the test log to the FlakyBot.
 # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
@@ -46,7 +54,7 @@ fi
 # If NOX_SESSION is set, it only runs the specified session,
 # otherwise run all the sessions.
 if [[ -n "${NOX_SESSION:-}" ]]; then
-    python3 -m nox -s ${NOX_SESSION:-}
+    python3 -m nox -s ${NOX_SESSION:-}
 else
-    python3 -m nox
+    python3 -m nox
 fi
diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt
index 006d8ef..6ad95a0 100644
--- a/.kokoro/requirements.txt
+++ b/.kokoro/requirements.txt
@@ -112,34 +112,38 @@ colorlog==6.8.2 \
     # via
     #   gcp-docuploader
     #   nox
-cryptography==43.0.1 \
-    --hash=sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494 \
-    --hash=sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806 \
-    --hash=sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d \
-    --hash=sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062 \
-    --hash=sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2 \
-    --hash=sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4 \
-    --hash=sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1 \
-    --hash=sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85 \
-    --hash=sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84 \
-    --hash=sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042 \
-    --hash=sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d \
-    --hash=sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962 \
-    --hash=sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2 \
-    --hash=sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa \
-    --hash=sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d \
-    --hash=sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365 \
-    --hash=sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96 \
-    --hash=sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47 \
-    --hash=sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d \
-    --hash=sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d \
-    --hash=sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c \
-    --hash=sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb \
-    --hash=sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277 \
-    --hash=sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172 \
-    --hash=sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034 \
-    --hash=sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a \
-    --hash=sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289
+cryptography==44.0.1 \
+    --hash=sha256:00918d859aa4e57db8299607086f793fa7813ae2ff5a4637e318a25ef82730f7 \
+    --hash=sha256:1e8d181e90a777b63f3f0caa836844a1182f1f265687fac2115fcf245f5fbec3 \
+    --hash=sha256:1f9a92144fa0c877117e9748c74501bea842f93d21ee00b0cf922846d9d0b183 \
+    --hash=sha256:21377472ca4ada2906bc313168c9dc7b1d7ca417b63c1c3011d0c74b7de9ae69 \
+    --hash=sha256:24979e9f2040c953a94bf3c6782e67795a4c260734e5264dceea65c8f4bae64a \
+    --hash=sha256:2a46a89ad3e6176223b632056f321bc7de36b9f9b93b2cc1cccf935a3849dc62 \
+    --hash=sha256:322eb03ecc62784536bc173f1483e76747aafeb69c8728df48537eb431cd1911 \
+    --hash=sha256:436df4f203482f41aad60ed1813811ac4ab102765ecae7a2bbb1dbb66dcff5a7 \
+    --hash=sha256:4f422e8c6a28cf8b7f883eb790695d6d45b0c385a2583073f3cec434cc705e1a \
+    --hash=sha256:53f23339864b617a3dfc2b0ac8d5c432625c80014c25caac9082314e9de56f41 \
+    --hash=sha256:5fed5cd6102bb4eb843e3315d2bf25fede494509bddadb81e03a859c1bc17b83 \
+    --hash=sha256:610a83540765a8d8ce0f351ce42e26e53e1f774a6efb71eb1b41eb01d01c3d12 \
+    --hash=sha256:6c8acf6f3d1f47acb2248ec3ea261171a671f3d9428e34ad0357148d492c7864 \
+    --hash=sha256:6f76fdd6fd048576a04c5210d53aa04ca34d2ed63336d4abd306d0cbe298fddf \
+    --hash=sha256:72198e2b5925155497a5a3e8c216c7fb3e64c16ccee11f0e7da272fa93b35c4c \
+    --hash=sha256:887143b9ff6bad2b7570da75a7fe8bbf5f65276365ac259a5d2d5147a73775f2 \
+    --hash=sha256:888fcc3fce0c888785a4876ca55f9f43787f4c5c1cc1e2e0da71ad481ff82c5b \
+    --hash=sha256:8e6a85a93d0642bd774460a86513c5d9d80b5c002ca9693e63f6e540f1815ed0 \
+    --hash=sha256:94f99f2b943b354a5b6307d7e8d19f5c423a794462bde2bf310c770ba052b1c4 \
+    --hash=sha256:9b336599e2cb77b1008cb2ac264b290803ec5e8e89d618a5e978ff5eb6f715d9 \
+    --hash=sha256:a2d8a7045e1ab9b9f803f0d9531ead85f90c5f2859e653b61497228b18452008 \
+    --hash=sha256:b8272f257cf1cbd3f2e120f14c68bff2b6bdfcc157fafdee84a1b795efd72862 \
+    --hash=sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009 \
+    --hash=sha256:d9c5b9f698a83c8bd71e0f4d3f9f839ef244798e5ffe96febfa9714717db7af7 \
+    --hash=sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f \
+    --hash=sha256:df978682c1504fc93b3209de21aeabf2375cb1571d4e61907b3e7a2540e83026 \
+    --hash=sha256:e403f7f766ded778ecdb790da786b418a9f2394f36e8cc8b796cc056ab05f44f \
+    --hash=sha256:eb3889330f2a4a148abead555399ec9a32b13b7c8ba969b72d8e500eb7ef84cd \
+    --hash=sha256:f4daefc971c2d1f82f03097dc6f216744a6cd2ac0f04c68fb935ea2ba2a0d420 \
+    --hash=sha256:f51f5705ab27898afda1aaa430f34ad90dc117421057782022edf0600bec5f14 \
+    --hash=sha256:fd0ee90072861e276b0ff08bd627abec29e32a53b2be44e41dbcdf87cbee2b00
     # via
     #   -r requirements.in
     #   gcp-releasetool
@@ -254,9 +258,9 @@ jeepney==0.8.0 \
     # via
     #   keyring
     #   secretstorage
-jinja2==3.1.4 \
-    --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \
-    --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d
+jinja2==3.1.5 \
+    --hash=sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb \
+    --hash=sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb
     # via gcp-releasetool
 keyring==25.4.1 \
     --hash=sha256:5426f817cf7f6f007ba5ec722b1bcad95a75b27d780343772ad76b17cb47b0bf \
diff --git a/CHANGELOG.md b/CHANGELOG.md
index be42c60..19fe645 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,12 @@
 # Changelog
 
+## [1.4.2](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.4.1...v1.4.2) (2025-03-04)
+
+
+### Bug Fixes
+
+* Remove unbox json functionality from JSONArrowType ([#325](https://github.com/googleapis/python-db-dtypes-pandas/issues/325)) ([60deef1](https://github.com/googleapis/python-db-dtypes-pandas/commit/60deef1636ba3e4f88725db8b9ce23b634168ac2))
+
 ## [1.4.1](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.4.0...v1.4.1) (2025-01-30)
 
 
diff --git a/db_dtypes/__init__.py b/db_dtypes/__init__.py
index d5b05dc..2424ff4 100644
--- a/db_dtypes/__init__.py
+++ b/db_dtypes/__init__.py
@@ -50,7 +50,7 @@
 # To use JSONArray and JSONDtype, you'll need Pandas 1.5.0 or later. With the removal
 # of Python 3.7 compatibility, the minimum Pandas version will be updated to 1.5.0.
 if packaging.version.Version(pandas.__version__) >= packaging.version.Version("1.5.0"):
-    from db_dtypes.json import JSONArray, JSONArrowScalar, JSONArrowType, JSONDtype
+    from db_dtypes.json import JSONArray, JSONArrowType, JSONDtype
 else:
     JSONArray = None
     JSONDtype = None
@@ -375,7 +375,6 @@ def __sub__(self, other):
     "JSONDtype",
     "JSONArray",
     "JSONArrowType",
-    "JSONArrowScalar",
     "TimeArray",
     "TimeDtype",
 ]
diff --git a/db_dtypes/json.py b/db_dtypes/json.py
index 99e0c67..37aad83 100644
--- a/db_dtypes/json.py
+++ b/db_dtypes/json.py
@@ -256,11 +256,6 @@ def __array__(self, dtype=None, copy: bool | None = None) -> np.ndarray:
         return result
 
 
-class JSONArrowScalar(pa.ExtensionScalar):
-    def as_py(self):
-        return JSONArray._deserialize_json(self.value.as_py() if self.value else None)
-
-
 class JSONArrowType(pa.ExtensionType):
     """Arrow extension type for the `dbjson` Pandas extension type."""
 
@@ -280,9 +275,6 @@ def __hash__(self) -> int:
     def to_pandas_dtype(self):
         return JSONDtype()
 
-    def __arrow_ext_scalar_class__(self):
-        return JSONArrowScalar
-
 
 # Register the type to be included in RecordBatches, sent over IPC and received in
 # another Python process.
diff --git a/db_dtypes/version.py b/db_dtypes/version.py
index 3055780..c97e3ca 100644
--- a/db_dtypes/version.py
+++ b/db_dtypes/version.py
@@ -12,4 +12,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-__version__ = "1.4.1"
+__version__ = "1.4.2"
diff --git a/noxfile.py b/noxfile.py
index bf5b3d2..c487cd7 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -199,10 +199,7 @@ def prerelease(session, tests_path):
         "--prefer-binary",
         "--pre",
         "--upgrade",
-        # Limit pyarrow to versions prior to 20.0.0.dev19 to prevent a RuntimeWarning
-        # during import. This workaround can be removed once the underlying issue
-        # in pyarrow is resolved (see: https://github.com/apache/arrow/issues/45380).
-        "pyarrow<=20.0.0.dev18",
+        "pyarrow",
     )
     # Avoid pandas==2.2.0rc0 as this version causes PyArrow to fail. Once newer
     # prerelease comes out, this constraint can be removed. See
diff --git a/tests/unit/test_json.py b/tests/unit/test_json.py
index ff2c867..d15cfc7 100644
--- a/tests/unit/test_json.py
+++ b/tests/unit/test_json.py
@@ -13,7 +13,6 @@
 # limitations under the License.
 
 import json
-import math
 
 import numpy as np
 import pandas as pd
@@ -160,20 +159,15 @@ def test_json_arrow_to_pandas():
     s = arr.to_pandas()
     assert isinstance(s.dtypes, db_dtypes.JSONDtype)
     assert s[0]
-    assert s[1] == 100
-    assert math.isclose(s[2], 0.98)
-    assert s[3] == "hello world"
-    assert math.isclose(s[4][0], 0.1)
-    assert math.isclose(s[4][1], 0.2)
-    assert s[5] == {
-        "null_field": None,
-        "order": {
-            "items": ["book", "pen", "computer"],
-            "total": 15,
-            "address": {"street": "123 Main St", "city": "Anytown"},
-        },
-    }
-    assert pd.isna(s[6])
+    assert s[1] == "100"
+    assert s[2] == "0.98"
+    assert s[3] == '"hello world"'
+    assert s[4] == "[0.1,0.2]"
+    assert (
+        s[5]
+        == '{"null_field":null,"order":{"address":{"city":"Anytown","street":"123 Main St"},"items":["book","pen","computer"],"total":15}}'
+    )
+    assert s[6] == "null"
 
 
 def test_json_arrow_to_pylist():
@@ -186,20 +180,15 @@
     s = arr.to_pylist()
     assert isinstance(s, list)
     assert s[0]
-    assert s[1] == 100
-    assert math.isclose(s[2], 0.98)
-    assert s[3] == "hello world"
-    assert math.isclose(s[4][0], 0.1)
-    assert math.isclose(s[4][1], 0.2)
-    assert s[5] == {
-        "null_field": None,
-        "order": {
-            "items": ["book", "pen", "computer"],
-            "total": 15,
-            "address": {"street": "123 Main St", "city": "Anytown"},
-        },
-    }
-    assert s[6] is None
+    assert s[1] == "100"
+    assert s[2] == "0.98"
+    assert s[3] == '"hello world"'
+    assert s[4] == "[0.1,0.2]"
+    assert (
+        s[5]
+        == '{"null_field":null,"order":{"address":{"city":"Anytown","street":"123 Main St"},"items":["book","pen","computer"],"total":15}}'
+    )
+    assert s[6] == "null"
 
 
 def test_json_arrow_record_batch():
@@ -226,17 +215,12 @@
 
     assert isinstance(s, list)
     assert s[0]
-    assert s[1] == 100
-    assert math.isclose(s[2], 0.98)
-    assert s[3] == "hello world"
-    assert math.isclose(s[4][0], 0.1)
-    assert math.isclose(s[4][1], 0.2)
-    assert s[5] == {
-        "null_field": None,
-        "order": {
-            "items": ["book", "pen", "computer"],
-            "total": 15,
-            "address": {"street": "123 Main St", "city": "Anytown"},
-        },
-    }
-    assert s[6] is None
+    assert s[1] == "100"
+    assert s[2] == "0.98"
+    assert s[3] == '"hello world"'
+    assert s[4] == "[0.1,0.2]"
+    assert (
+        s[5]
+        == '{"null_field":null,"order":{"address":{"city":"Anytown","street":"123 Main St"},"items":["book","pen","computer"],"total":15}}'
+    )
+    assert s[6] == "null"
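
A minimal sketch of the round trip the updated tests exercise, assuming db-dtypes 1.4.2 (this release) and a recent pyarrow: with JSONArrowScalar and __arrow_ext_scalar_class__ removed, a JSONArrowType column converts back to the canonical JSON strings held in its string storage, and callers decode them explicitly when they want native Python objects. Building the array with pa.ExtensionArray.from_storage is an illustrative choice, not something this patch prescribes.

# Sketch only: assumes db-dtypes 1.4.2 and pyarrow are installed.
import json

import pyarrow as pa

import db_dtypes

# JSON documents are stored as canonical JSON strings in a string storage array.
storage = pa.array(["100", '"hello world"', "[0.1,0.2]"], type=pa.string())
arr = pa.ExtensionArray.from_storage(db_dtypes.JSONArrowType(), storage)

# Without the unboxing scalar, to_pylist() returns the JSON strings unchanged ...
assert arr.to_pylist() == ["100", '"hello world"', "[0.1,0.2]"]

# ... and callers opt into Python-native values explicitly.
assert [json.loads(value) for value in arr.to_pylist()] == [100, "hello world", [0.1, 0.2]]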