diff --git a/.coveragerc b/.coveragerc
index 1ed1a9704..04092257a 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -12,8 +12,3 @@ exclude_lines =
pragma: NO COVER
# Ignore debug-only repr
def __repr__
- # Ignore pkg_resources exceptions.
- # This is added at the module level as a safeguard for if someone
- # generates the code and tries to run it without pip installing. This
- # makes it virtually impossible to test properly.
- except pkg_resources.DistributionNotFound
diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index 7f291dbd5..773c1dfd2 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -13,5 +13,5 @@
# limitations under the License.
docker:
image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
- digest: sha256:4f9b3b106ad0beafc2c8a415e3f62c1a0cc23cabea115dbe841b848f581cfe99
-# created: 2023-10-18T20:26:37.410353675Z
+ digest: sha256:2f155882785883336b4468d5218db737bb1d10c9cea7cb62219ad16fe248c03c
+# created: 2023-11-29T14:54:29.548172703Z
diff --git a/.kokoro/presubmit/prerelease-deps-3.11.cfg b/.kokoro/presubmit/prerelease-deps-3.12.cfg
similarity index 77%
rename from .kokoro/presubmit/prerelease-deps-3.11.cfg
rename to .kokoro/presubmit/prerelease-deps-3.12.cfg
index 1e19f1239..ece962a17 100644
--- a/.kokoro/presubmit/prerelease-deps-3.11.cfg
+++ b/.kokoro/presubmit/prerelease-deps-3.12.cfg
@@ -3,5 +3,5 @@
# Only run this nox session.
env_vars: {
key: "NOX_SESSION"
- value: "prerelease_deps-3.11"
+ value: "prerelease_deps-3.12"
}
diff --git a/.kokoro/presubmit/snippets-3.11.cfg b/.kokoro/presubmit/snippets-3.12.cfg
similarity index 81%
rename from .kokoro/presubmit/snippets-3.11.cfg
rename to .kokoro/presubmit/snippets-3.12.cfg
index 74af6dd07..1381e8323 100644
--- a/.kokoro/presubmit/snippets-3.11.cfg
+++ b/.kokoro/presubmit/snippets-3.12.cfg
@@ -3,5 +3,5 @@
# Only run this nox session.
env_vars: {
key: "NOX_SESSION"
- value: "snippets-3.11"
+ value: "snippets-3.12"
}
diff --git a/.kokoro/presubmit/system-3.11.cfg b/.kokoro/presubmit/system-3.12.cfg
similarity index 82%
rename from .kokoro/presubmit/system-3.11.cfg
rename to .kokoro/presubmit/system-3.12.cfg
index 5ff31a603..789455bd6 100644
--- a/.kokoro/presubmit/system-3.11.cfg
+++ b/.kokoro/presubmit/system-3.12.cfg
@@ -3,5 +3,5 @@
# Only run this nox session.
env_vars: {
key: "NOX_SESSION"
- value: "system-3.11"
+ value: "system-3.12"
}
diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt
index 16170d0ca..e5c1ffca9 100644
--- a/.kokoro/requirements.txt
+++ b/.kokoro/requirements.txt
@@ -4,91 +4,75 @@
#
# pip-compile --allow-unsafe --generate-hashes requirements.in
#
-argcomplete==2.0.0 \
- --hash=sha256:6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20 \
- --hash=sha256:cffa11ea77999bb0dd27bb25ff6dc142a6796142f68d45b1a26b11f58724561e
+argcomplete==3.1.4 \
+ --hash=sha256:72558ba729e4c468572609817226fb0a6e7e9a0a7d477b882be168c0b4a62b94 \
+ --hash=sha256:fbe56f8cda08aa9a04b307d8482ea703e96a6a801611acb4be9bf3942017989f
# via nox
-attrs==22.1.0 \
- --hash=sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6 \
- --hash=sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c
+attrs==23.1.0 \
+ --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \
+ --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015
# via gcp-releasetool
-bleach==5.0.1 \
- --hash=sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a \
- --hash=sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c
- # via readme-renderer
-cachetools==5.2.0 \
- --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \
- --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db
+cachetools==5.3.2 \
+ --hash=sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2 \
+ --hash=sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1
# via google-auth
certifi==2023.7.22 \
--hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \
--hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9
# via requests
-cffi==1.15.1 \
- --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \
- --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \
- --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \
- --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \
- --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \
- --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \
- --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \
- --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \
- --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \
- --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \
- --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \
- --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \
- --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \
- --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \
- --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \
- --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \
- --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \
- --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \
- --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \
- --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \
- --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \
- --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \
- --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \
- --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \
- --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \
- --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \
- --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \
- --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \
- --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \
- --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \
- --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \
- --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \
- --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \
- --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \
- --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \
- --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \
- --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \
- --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \
- --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \
- --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \
- --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \
- --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \
- --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \
- --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \
- --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \
- --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \
- --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \
- --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \
- --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \
- --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \
- --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \
- --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \
- --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \
- --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \
- --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \
- --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \
- --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \
- --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \
- --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \
- --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \
- --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \
- --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \
- --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \
- --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0
+cffi==1.16.0 \
+ --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \
+ --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \
+ --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \
+ --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \
+ --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \
+ --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \
+ --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \
+ --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \
+ --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \
+ --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \
+ --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \
+ --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \
+ --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \
+ --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \
+ --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \
+ --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \
+ --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \
+ --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \
+ --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \
+ --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \
+ --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \
+ --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \
+ --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \
+ --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \
+ --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \
+ --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \
+ --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \
+ --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \
+ --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \
+ --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \
+ --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \
+ --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \
+ --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \
+ --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \
+ --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \
+ --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \
+ --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \
+ --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \
+ --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \
+ --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \
+ --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \
+ --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \
+ --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \
+ --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \
+ --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \
+ --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \
+ --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \
+ --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \
+ --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \
+ --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \
+ --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \
+ --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357
# via cryptography
charset-normalizer==2.1.1 \
--hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \
@@ -109,78 +93,74 @@ colorlog==6.7.0 \
# via
# gcp-docuploader
# nox
-commonmark==0.9.1 \
- --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \
- --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9
- # via rich
-cryptography==41.0.4 \
- --hash=sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67 \
- --hash=sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311 \
- --hash=sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8 \
- --hash=sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13 \
- --hash=sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143 \
- --hash=sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f \
- --hash=sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829 \
- --hash=sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd \
- --hash=sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397 \
- --hash=sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac \
- --hash=sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d \
- --hash=sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a \
- --hash=sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839 \
- --hash=sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e \
- --hash=sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6 \
- --hash=sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9 \
- --hash=sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860 \
- --hash=sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca \
- --hash=sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91 \
- --hash=sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d \
- --hash=sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714 \
- --hash=sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb \
- --hash=sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f
+cryptography==41.0.6 \
+ --hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \
+ --hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \
+ --hash=sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660 \
+ --hash=sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4 \
+ --hash=sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead \
+ --hash=sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed \
+ --hash=sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3 \
+ --hash=sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7 \
+ --hash=sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09 \
+ --hash=sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c \
+ --hash=sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43 \
+ --hash=sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65 \
+ --hash=sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6 \
+ --hash=sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da \
+ --hash=sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c \
+ --hash=sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b \
+ --hash=sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8 \
+ --hash=sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c \
+ --hash=sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d \
+ --hash=sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9 \
+ --hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \
+ --hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \
+ --hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae
# via
# gcp-releasetool
# secretstorage
-distlib==0.3.6 \
- --hash=sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46 \
- --hash=sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e
+distlib==0.3.7 \
+ --hash=sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057 \
+ --hash=sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8
# via virtualenv
-docutils==0.19 \
- --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \
- --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc
+docutils==0.20.1 \
+ --hash=sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 \
+ --hash=sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b
# via readme-renderer
-filelock==3.8.0 \
- --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \
- --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4
+filelock==3.13.1 \
+ --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \
+ --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c
# via virtualenv
-gcp-docuploader==0.6.4 \
- --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \
- --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf
+gcp-docuploader==0.6.5 \
+ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \
+ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea
# via -r requirements.in
-gcp-releasetool==1.10.5 \
- --hash=sha256:174b7b102d704b254f2a26a3eda2c684fd3543320ec239baf771542a2e58e109 \
- --hash=sha256:e29d29927fe2ca493105a82958c6873bb2b90d503acac56be2c229e74de0eec9
+gcp-releasetool==1.16.0 \
+ --hash=sha256:27bf19d2e87aaa884096ff941aa3c592c482be3d6a2bfe6f06afafa6af2353e3 \
+ --hash=sha256:a316b197a543fd036209d0caba7a8eb4d236d8e65381c80cbc6d7efaa7606d63
# via -r requirements.in
-google-api-core==2.10.2 \
- --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \
- --hash=sha256:34f24bd1d5f72a8c4519773d99ca6bf080a6c4e041b4e9f024fe230191dda62e
+google-api-core==2.12.0 \
+ --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \
+ --hash=sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160
# via
# google-cloud-core
# google-cloud-storage
-google-auth==2.14.1 \
- --hash=sha256:ccaa901f31ad5cbb562615eb8b664b3dd0bf5404a67618e642307f00613eda4d \
- --hash=sha256:f5d8701633bebc12e0deea4df8abd8aff31c28b355360597f7f2ee60f2e4d016
+google-auth==2.23.4 \
+ --hash=sha256:79905d6b1652187def79d491d6e23d0cbb3a21d3c7ba0dbaa9c8a01906b13ff3 \
+ --hash=sha256:d4bbc92fe4b8bfd2f3e8d88e5ba7085935da208ee38a134fc280e7ce682a05f2
# via
# gcp-releasetool
# google-api-core
# google-cloud-core
# google-cloud-storage
-google-cloud-core==2.3.2 \
- --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \
- --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a
+google-cloud-core==2.3.3 \
+ --hash=sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb \
+ --hash=sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863
# via google-cloud-storage
-google-cloud-storage==2.6.0 \
- --hash=sha256:104ca28ae61243b637f2f01455cc8a05e8f15a2a18ced96cb587241cdd3820f5 \
- --hash=sha256:4ad0415ff61abdd8bb2ae81c1f8f7ec7d91a1011613f2db87c614c550f97bfe9
+google-cloud-storage==2.13.0 \
+ --hash=sha256:ab0bf2e1780a1b74cf17fccb13788070b729f50c252f0c94ada2aae0ca95437d \
+ --hash=sha256:f62dc4c7b6cd4360d072e3deb28035fbdad491ac3d9b0b1815a12daea10f37c7
# via gcp-docuploader
google-crc32c==1.5.0 \
--hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \
@@ -251,29 +231,31 @@ google-crc32c==1.5.0 \
--hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \
--hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \
--hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4
- # via google-resumable-media
-google-resumable-media==2.4.0 \
- --hash=sha256:2aa004c16d295c8f6c33b2b4788ba59d366677c0a25ae7382436cb30f776deaa \
- --hash=sha256:8d5518502f92b9ecc84ac46779bd4f09694ecb3ba38a3e7ca737a86d15cbca1f
+ # via
+ # google-cloud-storage
+ # google-resumable-media
+google-resumable-media==2.6.0 \
+ --hash=sha256:972852f6c65f933e15a4a210c2b96930763b47197cdf4aa5f5bea435efb626e7 \
+ --hash=sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b
# via google-cloud-storage
-googleapis-common-protos==1.57.0 \
- --hash=sha256:27a849d6205838fb6cc3c1c21cb9800707a661bb21c6ce7fb13e99eb1f8a0c46 \
- --hash=sha256:a9f4a1d7f6d9809657b7f1316a1aa527f6664891531bcfcc13b6696e685f443c
+googleapis-common-protos==1.61.0 \
+ --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \
+ --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b
# via google-api-core
idna==3.4 \
--hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \
--hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2
# via requests
-importlib-metadata==5.0.0 \
- --hash=sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab \
- --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43
+importlib-metadata==6.8.0 \
+ --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \
+ --hash=sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743
# via
# -r requirements.in
# keyring
# twine
-jaraco-classes==3.2.3 \
- --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \
- --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a
+jaraco-classes==3.3.0 \
+ --hash=sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb \
+ --hash=sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621
# via keyring
jeepney==0.8.0 \
--hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \
@@ -285,75 +267,121 @@ jinja2==3.1.2 \
--hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \
--hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61
# via gcp-releasetool
-keyring==23.11.0 \
- --hash=sha256:3dd30011d555f1345dec2c262f0153f2f0ca6bca041fb1dc4588349bb4c0ac1e \
- --hash=sha256:ad192263e2cdd5f12875dedc2da13534359a7e760e77f8d04b50968a821c2361
+keyring==24.2.0 \
+ --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \
+ --hash=sha256:ca0746a19ec421219f4d713f848fa297a661a8a8c1504867e55bfb5e09091509
# via
# gcp-releasetool
# twine
-markupsafe==2.1.1 \
- --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \
- --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \
- --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \
- --hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \
- --hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \
- --hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \
- --hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \
- --hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \
- --hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \
- --hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \
- --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 \
- --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \
- --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \
- --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \
- --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \
- --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \
- --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \
- --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b \
- --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \
- --hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \
- --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \
- --hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \
- --hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \
- --hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \
- --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \
- --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \
- --hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \
- --hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \
- --hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \
- --hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \
- --hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \
- --hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \
- --hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \
- --hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \
- --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \
- --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \
- --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \
- --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \
- --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \
- --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7
+markdown-it-py==3.0.0 \
+ --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \
+ --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb
+ # via rich
+markupsafe==2.1.3 \
+ --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \
+ --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \
+ --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \
+ --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \
+ --hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \
+ --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \
+ --hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \
+ --hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \
+ --hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \
+ --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \
+ --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \
+ --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \
+ --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \
+ --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \
+ --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \
+ --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \
+ --hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \
+ --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \
+ --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \
+ --hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \
+ --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \
+ --hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \
+ --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \
+ --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \
+ --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \
+ --hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \
+ --hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \
+ --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \
+ --hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \
+ --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \
+ --hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \
+ --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \
+ --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \
+ --hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \
+ --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \
+ --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \
+ --hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \
+ --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \
+ --hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \
+ --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \
+ --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \
+ --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \
+ --hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \
+ --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \
+ --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \
+ --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \
+ --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \
+ --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \
+ --hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \
+ --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \
+ --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \
+ --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \
+ --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \
+ --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \
+ --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \
+ --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \
+ --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \
+ --hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \
+ --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \
+ --hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11
# via jinja2
-more-itertools==9.0.0 \
- --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \
- --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab
+mdurl==0.1.2 \
+ --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \
+ --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba
+ # via markdown-it-py
+more-itertools==10.1.0 \
+ --hash=sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a \
+ --hash=sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6
# via jaraco-classes
-nox==2022.11.21 \
- --hash=sha256:0e41a990e290e274cb205a976c4c97ee3c5234441a8132c8c3fd9ea3c22149eb \
- --hash=sha256:e21c31de0711d1274ca585a2c5fde36b1aa962005ba8e9322bf5eeed16dcd684
+nh3==0.2.14 \
+ --hash=sha256:116c9515937f94f0057ef50ebcbcc10600860065953ba56f14473ff706371873 \
+ --hash=sha256:18415df36db9b001f71a42a3a5395db79cf23d556996090d293764436e98e8ad \
+ --hash=sha256:203cac86e313cf6486704d0ec620a992c8bc164c86d3a4fd3d761dd552d839b5 \
+ --hash=sha256:2b0be5c792bd43d0abef8ca39dd8acb3c0611052ce466d0401d51ea0d9aa7525 \
+ --hash=sha256:377aaf6a9e7c63962f367158d808c6a1344e2b4f83d071c43fbd631b75c4f0b2 \
+ --hash=sha256:525846c56c2bcd376f5eaee76063ebf33cf1e620c1498b2a40107f60cfc6054e \
+ --hash=sha256:5529a3bf99402c34056576d80ae5547123f1078da76aa99e8ed79e44fa67282d \
+ --hash=sha256:7771d43222b639a4cd9e341f870cee336b9d886de1ad9bec8dddab22fe1de450 \
+ --hash=sha256:88c753efbcdfc2644a5012938c6b9753f1c64a5723a67f0301ca43e7b85dcf0e \
+ --hash=sha256:93a943cfd3e33bd03f77b97baa11990148687877b74193bf777956b67054dcc6 \
+ --hash=sha256:9be2f68fb9a40d8440cbf34cbf40758aa7f6093160bfc7fb018cce8e424f0c3a \
+ --hash=sha256:a0c509894fd4dccdff557068e5074999ae3b75f4c5a2d6fb5415e782e25679c4 \
+ --hash=sha256:ac8056e937f264995a82bf0053ca898a1cb1c9efc7cd68fa07fe0060734df7e4 \
+ --hash=sha256:aed56a86daa43966dd790ba86d4b810b219f75b4bb737461b6886ce2bde38fd6 \
+ --hash=sha256:e8986f1dd3221d1e741fda0a12eaa4a273f1d80a35e31a1ffe579e7c621d069e \
+ --hash=sha256:f99212a81c62b5f22f9e7c3e347aa00491114a5647e1f13bbebd79c3e5f08d75
+ # via readme-renderer
+nox==2023.4.22 \
+ --hash=sha256:0b1adc619c58ab4fa57d6ab2e7823fe47a32e70202f287d78474adcc7bda1891 \
+ --hash=sha256:46c0560b0dc609d7d967dc99e22cb463d3c4caf54a5fda735d6c11b5177e3a9f
# via -r requirements.in
-packaging==21.3 \
- --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \
- --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522
+packaging==23.2 \
+ --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \
+ --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7
# via
# gcp-releasetool
# nox
-pkginfo==1.8.3 \
- --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \
- --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c
+pkginfo==1.9.6 \
+ --hash=sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546 \
+ --hash=sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046
# via twine
-platformdirs==2.5.4 \
- --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \
- --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10
+platformdirs==3.11.0 \
+ --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \
+ --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e
# via virtualenv
protobuf==3.20.3 \
--hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \
@@ -383,34 +411,30 @@ protobuf==3.20.3 \
# gcp-releasetool
# google-api-core
# googleapis-common-protos
-pyasn1==0.4.8 \
- --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \
- --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba
+pyasn1==0.5.0 \
+ --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \
+ --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde
# via
# pyasn1-modules
# rsa
-pyasn1-modules==0.2.8 \
- --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \
- --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74
+pyasn1-modules==0.3.0 \
+ --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \
+ --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d
# via google-auth
pycparser==2.21 \
--hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
--hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
# via cffi
-pygments==2.15.0 \
- --hash=sha256:77a3299119af881904cd5ecd1ac6a66214b6e9bed1f2db16993b54adede64094 \
- --hash=sha256:f7e36cffc4c517fbc252861b9a6e4644ca0e5abadf9a113c72d1358ad09b9500
+pygments==2.16.1 \
+ --hash=sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692 \
+ --hash=sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29
# via
# readme-renderer
# rich
-pyjwt==2.6.0 \
- --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \
- --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14
+pyjwt==2.8.0 \
+ --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \
+ --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320
# via gcp-releasetool
-pyparsing==3.0.9 \
- --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \
- --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc
- # via packaging
pyperclip==1.8.2 \
--hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57
# via gcp-releasetool
@@ -418,9 +442,9 @@ python-dateutil==2.8.2 \
--hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \
--hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9
# via gcp-releasetool
-readme-renderer==37.3 \
- --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \
- --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343
+readme-renderer==42.0 \
+ --hash=sha256:13d039515c1f24de668e2c93f2e877b9dbe6c6c32328b90a40a49d8b2b85f36d \
+ --hash=sha256:2d55489f83be4992fe4454939d1a051c33edbab778e82761d060c9fc6b308cd1
# via twine
requests==2.31.0 \
--hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \
@@ -431,17 +455,17 @@ requests==2.31.0 \
# google-cloud-storage
# requests-toolbelt
# twine
-requests-toolbelt==0.10.1 \
- --hash=sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7 \
- --hash=sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d
+requests-toolbelt==1.0.0 \
+ --hash=sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6 \
+ --hash=sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06
# via twine
rfc3986==2.0.0 \
--hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \
--hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c
# via twine
-rich==12.6.0 \
- --hash=sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e \
- --hash=sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0
+rich==13.6.0 \
+ --hash=sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245 \
+ --hash=sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef
# via twine
rsa==4.9 \
--hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \
@@ -455,43 +479,37 @@ six==1.16.0 \
--hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
--hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
# via
- # bleach
# gcp-docuploader
- # google-auth
# python-dateutil
-twine==4.0.1 \
- --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \
- --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0
+twine==4.0.2 \
+ --hash=sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8 \
+ --hash=sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8
# via -r requirements.in
-typing-extensions==4.4.0 \
- --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \
- --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e
+typing-extensions==4.8.0 \
+ --hash=sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0 \
+ --hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef
# via -r requirements.in
-urllib3==1.26.18 \
- --hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \
- --hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0
+urllib3==2.0.7 \
+ --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \
+ --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e
# via
# requests
# twine
-virtualenv==20.16.7 \
- --hash=sha256:8691e3ff9387f743e00f6bb20f70121f5e4f596cae754531f2b3b3a1b1ac696e \
- --hash=sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29
+virtualenv==20.24.6 \
+ --hash=sha256:02ece4f56fbf939dbbc33c0715159951d6bf14aaf5457b092e4548e1382455af \
+ --hash=sha256:520d056652454c5098a00c0f073611ccbea4c79089331f60bf9d7ba247bb7381
# via nox
-webencodings==0.5.1 \
- --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \
- --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923
- # via bleach
-wheel==0.38.4 \
- --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \
- --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8
+wheel==0.41.3 \
+ --hash=sha256:488609bc63a29322326e05560731bf7bfea8e48ad646e1f5e40d366607de0942 \
+ --hash=sha256:4d4987ce51a49370ea65c0bfd2234e8ce80a12780820d9dc462597a6e60d0841
# via -r requirements.in
-zipp==3.10.0 \
- --hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \
- --hash=sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8
+zipp==3.17.0 \
+ --hash=sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31 \
+ --hash=sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0
# via importlib-metadata
# The following packages are considered to be unsafe in a requirements file:
-setuptools==65.5.1 \
- --hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \
- --hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f
+setuptools==68.2.2 \
+ --hash=sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87 \
+ --hash=sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a
# via -r requirements.in
diff --git a/.kokoro/samples/python3.12/common.cfg b/.kokoro/samples/python3.12/common.cfg
new file mode 100644
index 000000000..6eb699edd
--- /dev/null
+++ b/.kokoro/samples/python3.12/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.12"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-312"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-bigquery/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-bigquery/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.12/continuous.cfg b/.kokoro/samples/python3.12/continuous.cfg
new file mode 100644
index 000000000..a1c8d9759
--- /dev/null
+++ b/.kokoro/samples/python3.12/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.12/periodic-head.cfg b/.kokoro/samples/python3.12/periodic-head.cfg
new file mode 100644
index 000000000..5aa01bab5
--- /dev/null
+++ b/.kokoro/samples/python3.12/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-bigquery/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.12/periodic.cfg b/.kokoro/samples/python3.12/periodic.cfg
new file mode 100644
index 000000000..71cd1e597
--- /dev/null
+++ b/.kokoro/samples/python3.12/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
diff --git a/.kokoro/samples/python3.12/presubmit.cfg b/.kokoro/samples/python3.12/presubmit.cfg
new file mode 100644
index 000000000..a1c8d9759
--- /dev/null
+++ b/.kokoro/samples/python3.12/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 41206fd78..c1bd5b389 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,33 @@
[1]: https://pypi.org/project/google-cloud-bigquery/#history
+## [3.14.0](https://github.com/googleapis/python-bigquery/compare/v3.13.0...v3.14.0) (2023-12-08)
+
+
+### Features
+
+* Add `Client.query_and_wait` which directly returns a `RowIterator` of results ([#1722](https://github.com/googleapis/python-bigquery/issues/1722)) ([89a647e](https://github.com/googleapis/python-bigquery/commit/89a647e19fe5d7302c0a39bba77a155635c5c29d))
+* Add `job_id`, `location`, `project`, and `query_id` properties on `RowIterator` ([#1733](https://github.com/googleapis/python-bigquery/issues/1733)) ([494f275](https://github.com/googleapis/python-bigquery/commit/494f275ab2493dc7904f685c4d12e60bef51ab21))
+* Add `job_timeout_ms` to job configuration classes ([#1675](https://github.com/googleapis/python-bigquery/issues/1675)) ([84d64cd](https://github.com/googleapis/python-bigquery/commit/84d64cdd157afef4a7bf7807e557d59452133434))
+* Add support for dataset.max_time_travel_hours ([#1683](https://github.com/googleapis/python-bigquery/issues/1683)) ([f22eff2](https://github.com/googleapis/python-bigquery/commit/f22eff25f116f1c4973ac2b8b03bc8a4ae1f3f42))
+* Add support for Dataset.isCaseInsensitive ([#1671](https://github.com/googleapis/python-bigquery/issues/1671)) ([386fa86](https://github.com/googleapis/python-bigquery/commit/386fa86c89b8cff69fc02213254a1c53c02fee42))
+* Add support for Python 3.12 ([#1736](https://github.com/googleapis/python-bigquery/issues/1736)) ([3c0976a](https://github.com/googleapis/python-bigquery/commit/3c0976aecb0f917477feef4e9ed865997c2bb106))
+* Removed pkg_resources from all test files and moved importlib into pandas extra ([#1726](https://github.com/googleapis/python-bigquery/issues/1726)) ([1f4ebb1](https://github.com/googleapis/python-bigquery/commit/1f4ebb1eca4f9380a31172fc8cb2fae125f8c5a2))
+* Support data_governance_type ([#1708](https://github.com/googleapis/python-bigquery/issues/1708)) ([eff365d](https://github.com/googleapis/python-bigquery/commit/eff365dc17755d0855338e2f273428ffe2056f67))
+
+
+### Bug Fixes
+
+* `load_table_from_dataframe` now assumes there may be local null values ([#1735](https://github.com/googleapis/python-bigquery/issues/1735)) ([f05dc69](https://github.com/googleapis/python-bigquery/commit/f05dc69a1f8c65ac32085bfcc6950c2c83f8a843))
+* Ensure query job retry has longer deadline than API request deadline ([#1734](https://github.com/googleapis/python-bigquery/issues/1734)) ([5573579](https://github.com/googleapis/python-bigquery/commit/55735791122f97b7f67cb962b489fd1f12210af5))
+* Keep `RowIterator.total_rows` populated after iteration ([#1748](https://github.com/googleapis/python-bigquery/issues/1748)) ([8482f47](https://github.com/googleapis/python-bigquery/commit/8482f4759ce3c4b00fa06a7f306a2ac4d4ee8eb7))
+* Move grpc, proto-plus and protobuf packages to extras ([#1721](https://github.com/googleapis/python-bigquery/issues/1721)) ([5ce4d13](https://github.com/googleapis/python-bigquery/commit/5ce4d136af97b91fbe1cc56bba1021e50a9c8476))
+
+
+### Performance Improvements
+
+* Use the first page of results when `query(api_method="QUERY")` ([#1723](https://github.com/googleapis/python-bigquery/issues/1723)) ([6290517](https://github.com/googleapis/python-bigquery/commit/6290517d6b153a31f20098f75aee580b7915aca9))
+
## [3.13.0](https://github.com/googleapis/python-bigquery/compare/v3.12.0...v3.13.0) (2023-10-30)
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 5dc30a1f8..7be61e6b6 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -22,7 +22,7 @@ In order to add a feature:
documentation.
- The feature must work fully on the following CPython versions:
- 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows.
+ 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows.
- The feature must not add unnecessary dependencies (where
"unnecessary" is of course subjective, but new dependencies should
@@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests.
- To run a single unit test::
- $ nox -s unit-3.11 -- -k
+ $ nox -s unit-3.12 -- -k
.. note::
@@ -226,12 +226,14 @@ We support:
- `Python 3.9`_
- `Python 3.10`_
- `Python 3.11`_
+- `Python 3.12`_
.. _Python 3.7: https://docs.python.org/3.7/
.. _Python 3.8: https://docs.python.org/3.8/
.. _Python 3.9: https://docs.python.org/3.9/
.. _Python 3.10: https://docs.python.org/3.10/
.. _Python 3.11: https://docs.python.org/3.11/
+.. _Python 3.12: https://docs.python.org/3.12/
Supported versions can be found in our ``noxfile.py`` `config`_.
diff --git a/google/cloud/bigquery/__init__.py b/google/cloud/bigquery/__init__.py
index 40e3a1578..72576e608 100644
--- a/google/cloud/bigquery/__init__.py
+++ b/google/cloud/bigquery/__init__.py
@@ -202,6 +202,7 @@
# Custom exceptions
"LegacyBigQueryStorageError",
"LegacyPyarrowError",
+ "LegacyPandasError",
]
diff --git a/google/cloud/bigquery/_job_helpers.py b/google/cloud/bigquery/_job_helpers.py
index 09daaa2a2..095de4faa 100644
--- a/google/cloud/bigquery/_job_helpers.py
+++ b/google/cloud/bigquery/_job_helpers.py
@@ -12,9 +12,32 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Helpers for interacting with the job REST APIs from the client."""
+"""Helpers for interacting with the job REST APIs from the client.
+
+For queries, there are three cases to consider:
+
+1. jobs.insert: This always returns a job resource.
+2. jobs.query, jobCreationMode=JOB_CREATION_REQUIRED:
+ This sometimes can return the results inline, but always includes a job ID.
+3. jobs.query, jobCreationMode=JOB_CREATION_OPTIONAL:
+ This sometimes doesn't create a job at all, instead returning the results.
+ For better debugging, an auto-generated query ID is included in the
+ response.
+
+Client.query() calls either (1) or (2), depending on what the user provides
+for the api_method parameter. query() always returns a QueryJob object, which
+can retry the query when the query job fails for a retriable reason.
+
+Client.query_and_wait() calls (3). This returns a RowIterator that may wrap
+local results from the response or may wrap a query job containing multiple
+pages of results. Even though query_and_wait() waits for the job to complete,
+we still need a separate job_retry object because there are different
+predicates where it is safe to generate a new query ID.
+"""
import copy
+import functools
+import os
import uuid
from typing import Any, Dict, TYPE_CHECKING, Optional
@@ -22,6 +45,8 @@
from google.api_core import retry as retries
from google.cloud.bigquery import job
+import google.cloud.bigquery.query
+from google.cloud.bigquery import table
# Avoid circular imports
if TYPE_CHECKING: # pragma: NO COVER
@@ -58,6 +83,25 @@ def make_job_id(job_id: Optional[str] = None, prefix: Optional[str] = None) -> s
return str(uuid.uuid4())
+def job_config_with_defaults(
+ job_config: Optional[job.QueryJobConfig],
+ default_job_config: Optional[job.QueryJobConfig],
+) -> Optional[job.QueryJobConfig]:
+ """Create a copy of `job_config`, replacing unset values with those from
+ `default_job_config`.
+ """
+ if job_config is None:
+ return default_job_config
+
+ if default_job_config is None:
+ return job_config
+
+ # Both job_config and default_job_config are not None, so make a copy of
+ # job_config merged with default_job_config. Anything already explicitly
+ # set on job_config should not be replaced.
+ return job_config._fill_from_default(default_job_config)
+
+
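A minimal sketch of the merge semantics described in the docstring above, assuming the helper is imported from ``google.cloud.bigquery._job_helpers`` and using arbitrary example properties:

```python
from google.cloud import bigquery
from google.cloud.bigquery import _job_helpers

# Sketch only: property values are arbitrary examples.
explicit = bigquery.QueryJobConfig(use_query_cache=False)
defaults = bigquery.QueryJobConfig(use_query_cache=True, maximum_bytes_billed=10**9)

merged = _job_helpers.job_config_with_defaults(explicit, defaults)
assert merged.use_query_cache is False        # explicitly set value is kept
assert merged.maximum_bytes_billed == 10**9   # unset value filled from the default
```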
def query_jobs_insert(
client: "Client",
query: str,
@@ -66,9 +110,9 @@ def query_jobs_insert(
job_id_prefix: Optional[str],
location: Optional[str],
project: str,
- retry: retries.Retry,
+ retry: Optional[retries.Retry],
timeout: Optional[float],
- job_retry: retries.Retry,
+ job_retry: Optional[retries.Retry],
) -> job.QueryJob:
"""Initiate a query using jobs.insert.
@@ -122,7 +166,13 @@ def do_query():
return future
-def _to_query_request(job_config: Optional[job.QueryJobConfig]) -> Dict[str, Any]:
+def _to_query_request(
+ job_config: Optional[job.QueryJobConfig] = None,
+ *,
+ query: str,
+ location: Optional[str] = None,
+ timeout: Optional[float] = None,
+) -> Dict[str, Any]:
"""Transform from Job resource to QueryRequest resource.
Most of the keys in job.configuration.query are in common with
@@ -149,6 +199,15 @@ def _to_query_request(job_config: Optional[job.QueryJobConfig]) -> Dict[str, Any
request_body.setdefault("formatOptions", {})
request_body["formatOptions"]["useInt64Timestamp"] = True # type: ignore
+ if timeout is not None:
+ # Subtract a buffer for context switching, network latency, etc.
+ request_body["timeoutMs"] = max(0, int(1000 * timeout) - _TIMEOUT_BUFFER_MILLIS)
+
+ if location is not None:
+ request_body["location"] = location
+
+ request_body["query"] = query
+
return request_body
@@ -197,20 +256,19 @@ def _to_query_job(
job_complete = query_response.get("jobComplete")
if job_complete:
query_job._properties["status"]["state"] = "DONE"
- # TODO: https://github.com/googleapis/python-bigquery/issues/589
- # Set the first page of results if job is "complete" and there is
- # only 1 page of results. Otherwise, use the existing logic that
- # refreshes the job stats.
- #
- # This also requires updates to `to_dataframe` and the DB API connector
- # so that they don't try to read from a destination table if all the
- # results are present.
+ query_job._query_results = google.cloud.bigquery.query._QueryResults(
+ query_response
+ )
else:
query_job._properties["status"]["state"] = "PENDING"
return query_job
+def _to_query_path(project: str) -> str:
+ return f"/projects/{project}/queries"
+
+
def query_jobs_query(
client: "Client",
query: str,
@@ -221,18 +279,14 @@ def query_jobs_query(
timeout: Optional[float],
job_retry: retries.Retry,
) -> job.QueryJob:
- """Initiate a query using jobs.query.
+ """Initiate a query using jobs.query with jobCreationMode=JOB_CREATION_REQUIRED.
See: https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query
"""
- path = f"/projects/{project}/queries"
- request_body = _to_query_request(job_config)
-
- if timeout is not None:
- # Subtract a buffer for context switching, network latency, etc.
- request_body["timeoutMs"] = max(0, int(1000 * timeout) - _TIMEOUT_BUFFER_MILLIS)
- request_body["location"] = location
- request_body["query"] = query
+ path = _to_query_path(project)
+ request_body = _to_query_request(
+ query=query, job_config=job_config, location=location, timeout=timeout
+ )
def do_query():
request_body["requestId"] = make_job_id()
@@ -257,3 +311,235 @@ def do_query():
future._job_retry = job_retry
return future
+
+
+def query_and_wait(
+ client: "Client",
+ query: str,
+ *,
+ job_config: Optional[job.QueryJobConfig],
+ location: Optional[str],
+ project: str,
+ api_timeout: Optional[float] = None,
+ wait_timeout: Optional[float] = None,
+ retry: Optional[retries.Retry],
+ job_retry: Optional[retries.Retry],
+ page_size: Optional[int] = None,
+ max_results: Optional[int] = None,
+) -> table.RowIterator:
+ """Run the query, wait for it to finish, and return the results.
+
+ While ``jobCreationMode=JOB_CREATION_OPTIONAL`` is in preview in the
+ ``jobs.query`` REST API, use the default ``jobCreationMode`` unless
+    the environment variable ``QUERY_PREVIEW_ENABLED`` is set to ``true``. After
+ ``jobCreationMode`` is GA, this method will always use
+ ``jobCreationMode=JOB_CREATION_OPTIONAL``. See:
+ https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query
+
+ Args:
+ client:
+ BigQuery client to make API calls.
+ query (str):
+ SQL query to be executed. Defaults to the standard SQL
+ dialect. Use the ``job_config`` parameter to change dialects.
+ job_config (Optional[google.cloud.bigquery.job.QueryJobConfig]):
+ Extra configuration options for the job.
+ To override any options that were previously set in
+ the ``default_query_job_config`` given to the
+ ``Client`` constructor, manually set those options to ``None``,
+ or whatever value is preferred.
+ location (Optional[str]):
+ Location where to run the job. Must match the location of the
+ table used in the query as well as the destination table.
+ project (Optional[str]):
+            Project ID of the project where the job runs. Defaults
+ to the client's project.
+ api_timeout (Optional[float]):
+ The number of seconds to wait for the underlying HTTP transport
+ before using ``retry``.
+ wait_timeout (Optional[float]):
+ The number of seconds to wait for the query to finish. If the
+ query doesn't finish before this timeout, the client attempts
+ to cancel the query.
+ retry (Optional[google.api_core.retry.Retry]):
+ How to retry the RPC. This only applies to making RPC
+ calls. It isn't used to retry failed jobs. This has
+ a reasonable default that should only be overridden
+ with care.
+ job_retry (Optional[google.api_core.retry.Retry]):
+ How to retry failed jobs. The default retries
+ rate-limit-exceeded errors. Passing ``None`` disables
+ job retry. Not all jobs can be retried.
+ page_size (Optional[int]):
+ The maximum number of rows in each page of results from this
+ request. Non-positive values are ignored.
+ max_results (Optional[int]):
+ The maximum total number of rows from this request.
+
+ Returns:
+ google.cloud.bigquery.table.RowIterator:
+ Iterator of row data
+ :class:`~google.cloud.bigquery.table.Row`-s. During each
+ page, the iterator will have the ``total_rows`` attribute
+ set, which counts the total number of rows **in the result
+ set** (this is distinct from the total number of rows in the
+ current page: ``iterator.page.num_items``).
+
+ If the query is a special query that produces no results, e.g.
+ a DDL query, an ``_EmptyRowIterator`` instance is returned.
+
+ Raises:
+ TypeError:
+ If ``job_config`` is not an instance of
+ :class:`~google.cloud.bigquery.job.QueryJobConfig`
+ class.
+ """
+ # Some API parameters aren't supported by the jobs.query API. In these
+    # cases, fall back to a jobs.insert call.
+ if not _supported_by_jobs_query(job_config):
+ return _wait_or_cancel(
+ query_jobs_insert(
+ client=client,
+ query=query,
+ job_id=None,
+ job_id_prefix=None,
+ job_config=job_config,
+ location=location,
+ project=project,
+ retry=retry,
+ timeout=api_timeout,
+ job_retry=job_retry,
+ ),
+ api_timeout=api_timeout,
+ wait_timeout=wait_timeout,
+ retry=retry,
+ page_size=page_size,
+ max_results=max_results,
+ )
+
+ path = _to_query_path(project)
+ request_body = _to_query_request(
+ query=query, job_config=job_config, location=location, timeout=api_timeout
+ )
+
+ if page_size is not None and max_results is not None:
+ request_body["maxResults"] = min(page_size, max_results)
+ elif page_size is not None or max_results is not None:
+ request_body["maxResults"] = page_size or max_results
+
+ if os.getenv("QUERY_PREVIEW_ENABLED", "").casefold() == "true":
+ request_body["jobCreationMode"] = "JOB_CREATION_OPTIONAL"
+
+ def do_query():
+ request_body["requestId"] = make_job_id()
+ span_attributes = {"path": path}
+
+ # For easier testing, handle the retries ourselves.
+ if retry is not None:
+ response = retry(client._call_api)(
+ retry=None, # We're calling the retry decorator ourselves.
+ span_name="BigQuery.query",
+ span_attributes=span_attributes,
+ method="POST",
+ path=path,
+ data=request_body,
+ timeout=api_timeout,
+ )
+ else:
+ response = client._call_api(
+ retry=None,
+ span_name="BigQuery.query",
+ span_attributes=span_attributes,
+ method="POST",
+ path=path,
+ data=request_body,
+ timeout=api_timeout,
+ )
+
+ # Even if we run with JOB_CREATION_OPTIONAL, if there are more pages
+ # to fetch, there will be a job ID for jobs.getQueryResults.
+ query_results = google.cloud.bigquery.query._QueryResults.from_api_repr(
+ response
+ )
+ page_token = query_results.page_token
+ more_pages = page_token is not None
+
+ if more_pages or not query_results.complete:
+ # TODO(swast): Avoid a call to jobs.get in some cases (few
+ # remaining pages) by waiting for the query to finish and calling
+ # client._list_rows_from_query_results directly. Need to update
+ # RowIterator to fetch destination table via the job ID if needed.
+ return _wait_or_cancel(
+ _to_query_job(client, query, job_config, response),
+ api_timeout=api_timeout,
+ wait_timeout=wait_timeout,
+ retry=retry,
+ page_size=page_size,
+ max_results=max_results,
+ )
+
+ return table.RowIterator(
+ client=client,
+ api_request=functools.partial(client._call_api, retry, timeout=api_timeout),
+ path=None,
+ schema=query_results.schema,
+ max_results=max_results,
+ page_size=page_size,
+ total_rows=query_results.total_rows,
+ first_page_response=response,
+ location=query_results.location,
+ job_id=query_results.job_id,
+ query_id=query_results.query_id,
+ project=query_results.project,
+ )
+
+ if job_retry is not None:
+ return job_retry(do_query)()
+ else:
+ return do_query()
+
+
+def _supported_by_jobs_query(job_config: Optional[job.QueryJobConfig]) -> bool:
+ """True if jobs.query can be used. False if jobs.insert is needed."""
+ if job_config is None:
+ return True
+
+ return (
+ # These features aren't supported by jobs.query.
+ job_config.clustering_fields is None
+ and job_config.destination is None
+ and job_config.destination_encryption_configuration is None
+ and job_config.range_partitioning is None
+ and job_config.table_definitions is None
+ and job_config.time_partitioning is None
+ )
+
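For example (an illustrative sketch; the table reference is a placeholder), a config with a destination table forces the jobs.insert fallback, while no config at all is eligible for jobs.query:

    from google.cloud.bigquery import job
    from google.cloud.bigquery.table import TableReference

    config = job.QueryJobConfig()
    config.destination = TableReference.from_string("my-project.my_dataset.my_table")

    _supported_by_jobs_query(config)  # False: query_and_wait() falls back to jobs.insert
    _supported_by_jobs_query(None)    # True: eligible for the jobs.query path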
+
+def _wait_or_cancel(
+ job: job.QueryJob,
+ api_timeout: Optional[float],
+ wait_timeout: Optional[float],
+ retry: Optional[retries.Retry],
+ page_size: Optional[int],
+ max_results: Optional[int],
+) -> table.RowIterator:
+ """Wait for a job to complete and return the results.
+
+ If we can't return the results within the ``wait_timeout``, try to cancel
+ the job.
+ """
+ try:
+ return job.result(
+ page_size=page_size,
+ max_results=max_results,
+ retry=retry,
+ timeout=wait_timeout,
+ )
+ except Exception:
+ # Attempt to cancel the job since we can't return the results.
+ try:
+ job.cancel(retry=retry, timeout=api_timeout)
+ except Exception:
+ # Don't eat the original exception if cancel fails.
+ pass
+ raise
diff --git a/google/cloud/bigquery/_pandas_helpers.py b/google/cloud/bigquery/_pandas_helpers.py
index 53db9511c..380df7b1d 100644
--- a/google/cloud/bigquery/_pandas_helpers.py
+++ b/google/cloud/bigquery/_pandas_helpers.py
@@ -178,12 +178,18 @@ def bq_to_arrow_field(bq_field, array_type=None):
if arrow_type is not None:
if array_type is not None:
arrow_type = array_type # For GEOGRAPHY, at least initially
- is_nullable = bq_field.mode.upper() == "NULLABLE"
metadata = BQ_FIELD_TYPE_TO_ARROW_FIELD_METADATA.get(
bq_field.field_type.upper() if bq_field.field_type else ""
)
return pyarrow.field(
- bq_field.name, arrow_type, nullable=is_nullable, metadata=metadata
+ bq_field.name,
+ arrow_type,
+            # Even if the remote schema is REQUIRED, there's a chance there are
+            # local NULL values. Arrow will gladly interpret these NULL values
+ # as non-NULL and give you an arbitrary value. See:
+ # https://github.com/googleapis/python-bigquery/issues/1692
+ nullable=True,
+ metadata=metadata,
)
warnings.warn("Unable to determine type for field '{}'.".format(bq_field.name))
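A minimal pyarrow sketch of what this change opts into: the Arrow field is always declared nullable, so any locally present NULLs stay NULL rather than being reinterpreted as arbitrary values:

    import pyarrow as pa

    field = pa.field("age", pa.int64(), nullable=True)  # even for REQUIRED BigQuery columns
    values = pa.array([25, None, 31], type=pa.int64())
    print(field.nullable, values.null_count)  # True 1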
diff --git a/google/cloud/bigquery/_versions_helpers.py b/google/cloud/bigquery/_versions_helpers.py
index ce529b76e..4ff4b9700 100644
--- a/google/cloud/bigquery/_versions_helpers.py
+++ b/google/cloud/bigquery/_versions_helpers.py
@@ -24,6 +24,7 @@
_MIN_PYARROW_VERSION = packaging.version.Version("3.0.0")
_MIN_BQ_STORAGE_VERSION = packaging.version.Version("2.0.0")
_BQ_STORAGE_OPTIONAL_READ_SESSION_VERSION = packaging.version.Version("2.6.0")
+_MIN_PANDAS_VERSION = packaging.version.Version("1.1.0")
class PyarrowVersions:
@@ -171,3 +172,65 @@ def try_import(self, raise_if_error: bool = False) -> Any:
BQ_STORAGE_VERSIONS = BQStorageVersions()
+
+
+class PandasVersions:
+ """Version comparisons for pandas package."""
+
+ def __init__(self):
+ self._installed_version = None
+
+ @property
+ def installed_version(self) -> packaging.version.Version:
+ """Return the parsed version of pandas"""
+ if self._installed_version is None:
+ import pandas # type: ignore
+
+ self._installed_version = packaging.version.parse(
+ # Use 0.0.0, since it is earlier than any released version.
+ # Legacy versions also have the same property, but
+ # creating a LegacyVersion has been deprecated.
+ # https://github.com/pypa/packaging/issues/321
+ getattr(pandas, "__version__", "0.0.0")
+ )
+
+ return self._installed_version
+
+    def try_import(self, raise_if_error: bool = False) -> Any:
+        """Verify that a recent enough version of the pandas extra is installed.
+
+        The function assumes that the pandas extra is installed, and should thus
+        be used in places where this assumption holds.
+
+        Because `pip` can install an outdated version of this extra despite
+        the constraints in `setup.py`, the calling code can use this helper
+        to verify the version compatibility at runtime.
+
+        Returns:
+            The ``pandas`` module or ``None``.
+
+        Raises:
+            exceptions.LegacyPandasError:
+                If the pandas package is outdated and ``raise_if_error`` is
+                ``True``.
+        """
+ try:
+ import pandas
+ except ImportError as exc: # pragma: NO COVER
+ if raise_if_error:
+ raise exceptions.LegacyPandasError(
+ "pandas package not found. Install pandas version >="
+ f" {_MIN_PANDAS_VERSION}"
+ ) from exc
+ return None
+
+ if self.installed_version < _MIN_PANDAS_VERSION:
+ if raise_if_error:
+ msg = (
+ "Dependency pandas is outdated, please upgrade"
+ f" it to version >= {_MIN_PANDAS_VERSION}"
+ f" (version found: {self.installed_version})."
+ )
+ raise exceptions.LegacyPandasError(msg)
+ return None
+
+ return pandas
+
+
+PANDAS_VERSIONS = PandasVersions()
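A short usage sketch of the new helper, assuming the pandas extra is installed:

    from google.cloud.bigquery import _versions_helpers, exceptions

    try:
        pandas = _versions_helpers.PANDAS_VERSIONS.try_import(raise_if_error=True)
    except exceptions.LegacyPandasError as exc:
        print(f"pandas missing or too old: {exc}")
    else:
        print(pandas.__version__)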
diff --git a/google/cloud/bigquery/client.py b/google/cloud/bigquery/client.py
index 496015b21..284ccddb5 100644
--- a/google/cloud/bigquery/client.py
+++ b/google/cloud/bigquery/client.py
@@ -115,6 +115,9 @@
from google.cloud.bigquery.table import RowIterator
pyarrow = _versions_helpers.PYARROW_VERSIONS.try_import()
+pandas = (
+ _versions_helpers.PANDAS_VERSIONS.try_import()
+)  # mypy check fails because the pandas import is outside this module; related type: ignore comments address this
TimeoutType = Union[float, None]
ResumableTimeoutType = Union[
@@ -124,7 +127,6 @@
if typing.TYPE_CHECKING: # pragma: NO COVER
# os.PathLike is only subscriptable in Python 3.9+, thus shielding with a condition.
PathType = Union[str, bytes, os.PathLike[str], os.PathLike[bytes]]
- import pandas # type: ignore
import requests # required by api-core
_DEFAULT_CHUNKSIZE = 100 * 1024 * 1024 # 100 MB
@@ -253,23 +255,31 @@ def __init__(
self._connection = Connection(self, **kw_args)
self._location = location
- self._default_query_job_config = copy.deepcopy(default_query_job_config)
self._default_load_job_config = copy.deepcopy(default_load_job_config)
+ # Use property setter so validation can run.
+ self.default_query_job_config = default_query_job_config
+
@property
def location(self):
"""Default location for jobs / datasets / tables."""
return self._location
@property
- def default_query_job_config(self):
- """Default ``QueryJobConfig``.
- Will be merged into job configs passed into the ``query`` method.
+ def default_query_job_config(self) -> Optional[QueryJobConfig]:
+ """Default ``QueryJobConfig`` or ``None``.
+
+ Will be merged into job configs passed into the ``query`` or
+ ``query_and_wait`` methods.
"""
return self._default_query_job_config
@default_query_job_config.setter
- def default_query_job_config(self, value: QueryJobConfig):
+ def default_query_job_config(self, value: Optional[QueryJobConfig]):
+ if value is not None:
+ _verify_job_config_type(
+ value, QueryJobConfig, param_name="default_query_job_config"
+ )
self._default_query_job_config = copy.deepcopy(value)
@property
@@ -2488,7 +2498,7 @@ def load_table_from_file(
def load_table_from_dataframe(
self,
- dataframe: "pandas.DataFrame",
+ dataframe: "pandas.DataFrame", # type: ignore
destination: Union[Table, TableReference, str],
num_retries: int = _DEFAULT_NUM_RETRIES,
job_id: Optional[str] = None,
@@ -3353,26 +3363,12 @@ def query(
if location is None:
location = self.location
- if self._default_query_job_config:
- if job_config:
- _verify_job_config_type(
- job_config, google.cloud.bigquery.job.QueryJobConfig
- )
- # anything that's not defined on the incoming
- # that is in the default,
- # should be filled in with the default
- # the incoming therefore has precedence
- #
- # Note that _fill_from_default doesn't mutate the receiver
- job_config = job_config._fill_from_default(
- self._default_query_job_config
- )
- else:
- _verify_job_config_type(
- self._default_query_job_config,
- google.cloud.bigquery.job.QueryJobConfig,
- )
- job_config = self._default_query_job_config
+ if job_config is not None:
+ _verify_job_config_type(job_config, QueryJobConfig)
+
+ job_config = _job_helpers.job_config_with_defaults(
+ job_config, self._default_query_job_config
+ )
# Note that we haven't modified the original job_config (or
# _default_query_job_config) up to this point.
@@ -3403,6 +3399,112 @@ def query(
else:
raise ValueError(f"Got unexpected value for api_method: {repr(api_method)}")
+ def query_and_wait(
+ self,
+ query,
+ *,
+ job_config: Optional[QueryJobConfig] = None,
+ location: Optional[str] = None,
+ project: Optional[str] = None,
+ api_timeout: TimeoutType = DEFAULT_TIMEOUT,
+ wait_timeout: TimeoutType = None,
+ retry: retries.Retry = DEFAULT_RETRY,
+ job_retry: retries.Retry = DEFAULT_JOB_RETRY,
+ page_size: Optional[int] = None,
+ max_results: Optional[int] = None,
+ ) -> RowIterator:
+ """Run the query, wait for it to finish, and return the results.
+
+ While ``jobCreationMode=JOB_CREATION_OPTIONAL`` is in preview in the
+ ``jobs.query`` REST API, use the default ``jobCreationMode`` unless
+        the environment variable ``QUERY_PREVIEW_ENABLED`` is set to ``true``. After
+ ``jobCreationMode`` is GA, this method will always use
+ ``jobCreationMode=JOB_CREATION_OPTIONAL``. See:
+ https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query
+
+ Args:
+ query (str):
+ SQL query to be executed. Defaults to the standard SQL
+ dialect. Use the ``job_config`` parameter to change dialects.
+ job_config (Optional[google.cloud.bigquery.job.QueryJobConfig]):
+ Extra configuration options for the job.
+ To override any options that were previously set in
+ the ``default_query_job_config`` given to the
+ ``Client`` constructor, manually set those options to ``None``,
+ or whatever value is preferred.
+ location (Optional[str]):
+ Location where to run the job. Must match the location of the
+ table used in the query as well as the destination table.
+ project (Optional[str]):
+                Project ID of the project where the job runs. Defaults
+ to the client's project.
+ api_timeout (Optional[float]):
+ The number of seconds to wait for the underlying HTTP transport
+ before using ``retry``.
+ wait_timeout (Optional[float]):
+ The number of seconds to wait for the query to finish. If the
+ query doesn't finish before this timeout, the client attempts
+ to cancel the query.
+ retry (Optional[google.api_core.retry.Retry]):
+ How to retry the RPC. This only applies to making RPC
+ calls. It isn't used to retry failed jobs. This has
+ a reasonable default that should only be overridden
+ with care.
+ job_retry (Optional[google.api_core.retry.Retry]):
+ How to retry failed jobs. The default retries
+ rate-limit-exceeded errors. Passing ``None`` disables
+ job retry. Not all jobs can be retried.
+ page_size (Optional[int]):
+ The maximum number of rows in each page of results from this
+ request. Non-positive values are ignored.
+ max_results (Optional[int]):
+ The maximum total number of rows from this request.
+
+ Returns:
+ google.cloud.bigquery.table.RowIterator:
+ Iterator of row data
+ :class:`~google.cloud.bigquery.table.Row`-s. During each
+ page, the iterator will have the ``total_rows`` attribute
+ set, which counts the total number of rows **in the result
+ set** (this is distinct from the total number of rows in the
+ current page: ``iterator.page.num_items``).
+
+ If the query is a special query that produces no results, e.g.
+ a DDL query, an ``_EmptyRowIterator`` instance is returned.
+
+ Raises:
+ TypeError:
+ If ``job_config`` is not an instance of
+ :class:`~google.cloud.bigquery.job.QueryJobConfig`
+ class.
+ """
+ if project is None:
+ project = self.project
+
+ if location is None:
+ location = self.location
+
+ if job_config is not None:
+ _verify_job_config_type(job_config, QueryJobConfig)
+
+ job_config = _job_helpers.job_config_with_defaults(
+ job_config, self._default_query_job_config
+ )
+
+ return _job_helpers.query_and_wait(
+ self,
+ query,
+ job_config=job_config,
+ location=location,
+ project=project,
+ api_timeout=api_timeout,
+ wait_timeout=wait_timeout,
+ retry=retry,
+ job_retry=job_retry,
+ page_size=page_size,
+ max_results=max_results,
+ )
+
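A hedged usage sketch of the new method; the public dataset below is only an example, and setting QUERY_PREVIEW_ENABLED=true in the environment opts in to optional job creation while it is in preview:

    from google.cloud import bigquery

    client = bigquery.Client()
    rows = client.query_and_wait(
        "SELECT name, SUM(number) AS total"
        " FROM `bigquery-public-data.usa_names.usa_1910_2013`"
        " GROUP BY name ORDER BY total DESC LIMIT 10"
    )
    print(rows.total_rows, rows.job_id, rows.query_id)  # job_id may be None if no job was created
    for row in rows:
        print(row["name"], row["total"])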
def insert_rows(
self,
table: Union[Table, TableReference, str],
@@ -3841,6 +3943,8 @@ def list_rows(
# tables can be fetched without a column filter.
selected_fields=selected_fields,
total_rows=getattr(table, "num_rows", None),
+ project=table.project,
+ location=table.location,
)
return row_iterator
@@ -3849,7 +3953,7 @@ def _list_rows_from_query_results(
job_id: str,
location: str,
project: str,
- schema: SchemaField,
+ schema: Sequence[SchemaField],
total_rows: Optional[int] = None,
destination: Optional[Union[Table, TableReference, TableListItem, str]] = None,
max_results: Optional[int] = None,
@@ -3857,6 +3961,8 @@ def _list_rows_from_query_results(
page_size: Optional[int] = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: TimeoutType = DEFAULT_TIMEOUT,
+ query_id: Optional[str] = None,
+ first_page_response: Optional[Dict[str, Any]] = None,
) -> RowIterator:
"""List the rows of a completed query.
See
@@ -3896,6 +4002,11 @@ def _list_rows_from_query_results(
would otherwise be a successful response.
If multiple requests are made under the hood, ``timeout``
applies to each individual request.
+ query_id (Optional[str]):
+ [Preview] ID of a completed query. This ID is auto-generated
+ and not guaranteed to be populated.
+ first_page_response (Optional[dict]):
+ API response for the first page of results (if available).
Returns:
google.cloud.bigquery.table.RowIterator:
Iterator of row data
@@ -3915,6 +4026,11 @@ def _list_rows_from_query_results(
if start_index is not None:
params["startIndex"] = start_index
+ # We don't call jobs.query with a page size, so if the user explicitly
+ # requests a certain size, invalidate the cache.
+ if page_size is not None:
+ first_page_response = None
+
params["formatOptions.useInt64Timestamp"] = True
row_iterator = RowIterator(
client=self,
@@ -3926,6 +4042,11 @@ def _list_rows_from_query_results(
table=destination,
extra_params=params,
total_rows=total_rows,
+ project=project,
+ location=location,
+ job_id=job_id,
+ query_id=query_id,
+ first_page_response=first_page_response,
)
return row_iterator
diff --git a/google/cloud/bigquery/dataset.py b/google/cloud/bigquery/dataset.py
index af94784a4..c313045ce 100644
--- a/google/cloud/bigquery/dataset.py
+++ b/google/cloud/bigquery/dataset.py
@@ -524,7 +524,9 @@ class Dataset(object):
"default_table_expiration_ms": "defaultTableExpirationMs",
"friendly_name": "friendlyName",
"default_encryption_configuration": "defaultEncryptionConfiguration",
+ "is_case_insensitive": "isCaseInsensitive",
"storage_billing_model": "storageBillingModel",
+ "max_time_travel_hours": "maxTimeTravelHours",
"default_rounding_mode": "defaultRoundingMode",
}
@@ -533,6 +535,28 @@ def __init__(self, dataset_ref) -> None:
dataset_ref = DatasetReference.from_string(dataset_ref)
self._properties = {"datasetReference": dataset_ref.to_api_repr(), "labels": {}}
+ @property
+ def max_time_travel_hours(self):
+ """
+        Optional[int]: Defines the time travel window in hours. The value can
+        be from 48 to 168 hours (2 to 7 days) and must be a multiple of 24
+        hours (48, 72, 96, 120, 144, or 168).
+ The default value is 168 hours if this is not set.
+ """
+ return self._properties.get("maxTimeTravelHours")
+
+ @max_time_travel_hours.setter
+ def max_time_travel_hours(self, hours):
+ if not isinstance(hours, int):
+ raise ValueError(f"max_time_travel_hours must be an integer. Got {hours}")
+ if hours < 2 * 24 or hours > 7 * 24:
+ raise ValueError(
+ "Time Travel Window should be from 48 to 168 hours (2 to 7 days)"
+ )
+ if hours % 24 != 0:
+ raise ValueError("Time Travel Window should be multiple of 24")
+ self._properties["maxTimeTravelHours"] = hours
+
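For instance (the dataset ID is a placeholder):

    from google.cloud import bigquery

    dataset = bigquery.Dataset("my-project.my_dataset")
    dataset.max_time_travel_hours = 72     # OK: a multiple of 24 within 48..168
    # dataset.max_time_travel_hours = 50   # would raise ValueError (not a multiple of 24)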
@property
def default_rounding_mode(self):
"""Union[str, None]: defaultRoundingMode of the dataset as set by the user
@@ -799,6 +823,25 @@ def default_encryption_configuration(self, value):
api_repr = value.to_api_repr()
self._properties["defaultEncryptionConfiguration"] = api_repr
+ @property
+ def is_case_insensitive(self):
+ """Optional[bool]: True if the dataset and its table names are case-insensitive, otherwise False.
+ By default, this is False, which means the dataset and its table names are case-sensitive.
+ This field does not affect routine references.
+
+ Raises:
+ ValueError: for invalid value types.
+ """
+ return self._properties.get("isCaseInsensitive") or False
+
+ @is_case_insensitive.setter
+ def is_case_insensitive(self, value):
+ if not isinstance(value, bool) and value is not None:
+ raise ValueError("Pass a boolean value, or None")
+ if value is None:
+ value = False
+ self._properties["isCaseInsensitive"] = value
+
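And similarly for the new case-sensitivity flag (again, a placeholder dataset ID):

    from google.cloud import bigquery

    dataset = bigquery.Dataset("my-project.my_dataset")
    dataset.is_case_insensitive = True   # table names in this dataset compare case-insensitively
    # dataset.is_case_insensitive = "yes"  # would raise ValueError (must be bool or None)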
@property
def storage_billing_model(self):
"""Union[str, None]: StorageBillingModel of the dataset as set by the user
diff --git a/google/cloud/bigquery/exceptions.py b/google/cloud/bigquery/exceptions.py
index e94a6c832..62e0d540c 100644
--- a/google/cloud/bigquery/exceptions.py
+++ b/google/cloud/bigquery/exceptions.py
@@ -29,3 +29,7 @@ class BigQueryStorageNotFoundError(BigQueryError):
"""Raised when BigQuery Storage extra is not installed when trying to
import it.
"""
+
+
+class LegacyPandasError(BigQueryError):
+ """Raised when too old a version of pandas package is detected at runtime."""
diff --git a/google/cloud/bigquery/job/base.py b/google/cloud/bigquery/job/base.py
index a6267be41..97e0ea3bd 100644
--- a/google/cloud/bigquery/job/base.py
+++ b/google/cloud/bigquery/job/base.py
@@ -21,14 +21,13 @@
import typing
from typing import ClassVar, Dict, Optional, Sequence
+from google.api_core import retry as retries
from google.api_core import exceptions
import google.api_core.future.polling
from google.cloud.bigquery import _helpers
from google.cloud.bigquery.retry import DEFAULT_RETRY
-
-if typing.TYPE_CHECKING: # pragma: NO COVER
- from google.api_core import retry as retries
+from google.cloud.bigquery._helpers import _int_or_none
_DONE_STATE = "DONE"
@@ -171,6 +170,37 @@ def __setattr__(self, name, value):
)
super(_JobConfig, self).__setattr__(name, value)
+ @property
+    def job_timeout_ms(self):
+        """Optional parameter. Job timeout in milliseconds. If this time limit
+        is exceeded, BigQuery might attempt to stop the job.
+        https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfiguration.FIELDS.job_timeout_ms
+        e.g.
+
+            job_config = bigquery.QueryJobConfig(job_timeout_ms=5000)
+        or
+            job_config.job_timeout_ms = 5000
+
+ Raises:
+ ValueError: If ``value`` type is invalid.
+ """
+
+ # None as this is an optional parameter.
+ if self._properties.get("jobTimeoutMs"):
+ return self._properties["jobTimeoutMs"]
+ return None
+
+ @job_timeout_ms.setter
+ def job_timeout_ms(self, value):
+ try:
+ value = _int_or_none(value)
+ except ValueError as err:
+ raise ValueError("Pass an int for jobTimeoutMs, e.g. 5000").with_traceback(
+ err.__traceback__
+ )
+
+        # Docs indicate a string is expected by the API.
+ self._properties["jobTimeoutMs"] = str(value)
+
@property
def labels(self):
"""Dict[str, str]: Labels for the job.
@@ -793,7 +823,7 @@ def reload(
def cancel(
self,
client=None,
- retry: "retries.Retry" = DEFAULT_RETRY,
+ retry: Optional[retries.Retry] = DEFAULT_RETRY,
timeout: Optional[float] = None,
) -> bool:
"""API call: cancel job via a POST request
@@ -889,9 +919,9 @@ def done(
self.reload(retry=retry, timeout=timeout)
return self.state == _DONE_STATE
- def result( # type: ignore # (signature complaint)
+ def result( # type: ignore # (incompatible with supertype)
self,
- retry: "retries.Retry" = DEFAULT_RETRY,
+ retry: Optional[retries.Retry] = DEFAULT_RETRY,
timeout: Optional[float] = None,
) -> "_AsyncJob":
"""Start the job and wait for it to complete and get the result.
diff --git a/google/cloud/bigquery/job/query.py b/google/cloud/bigquery/job/query.py
index 57186acbc..4a529f949 100644
--- a/google/cloud/bigquery/job/query.py
+++ b/google/cloud/bigquery/job/query.py
@@ -22,6 +22,7 @@
from google.api_core import exceptions
from google.api_core.future import polling as polling_future
+from google.api_core import retry as retries
import requests
from google.cloud.bigquery.dataset import Dataset
@@ -69,7 +70,6 @@
import pandas # type: ignore
import geopandas # type: ignore
import pyarrow # type: ignore
- from google.api_core import retry as retries
from google.cloud import bigquery_storage
from google.cloud.bigquery.client import Client
from google.cloud.bigquery.table import RowIterator
@@ -779,7 +779,7 @@ def to_api_repr(self) -> dict:
resource = copy.deepcopy(self._properties)
# Query parameters have an addition property associated with them
# to indicate if the query is using named or positional parameters.
- query_parameters = resource["query"].get("queryParameters")
+ query_parameters = resource.get("query", {}).get("queryParameters")
if query_parameters:
if query_parameters[0].get("name") is None:
resource["query"]["parameterMode"] = "POSITIONAL"
@@ -930,6 +930,15 @@ def query(self):
self._properties, ["configuration", "query", "query"]
)
+ @property
+ def query_id(self) -> Optional[str]:
+ """[Preview] ID of a completed query.
+
+ This ID is auto-generated and not guaranteed to be populated.
+ """
+ query_results = self._query_results
+ return query_results.query_id if query_results is not None else None
+
@property
def query_parameters(self):
"""See
@@ -1460,14 +1469,14 @@ def _done_or_raise(self, retry=DEFAULT_RETRY, timeout=None):
except exceptions.GoogleAPIError as exc:
self.set_exception(exc)
- def result( # type: ignore # (complaints about the overloaded signature)
+ def result( # type: ignore # (incompatible with supertype)
self,
page_size: Optional[int] = None,
max_results: Optional[int] = None,
- retry: "retries.Retry" = DEFAULT_RETRY,
+ retry: Optional[retries.Retry] = DEFAULT_RETRY,
timeout: Optional[float] = None,
start_index: Optional[int] = None,
- job_retry: "retries.Retry" = DEFAULT_JOB_RETRY,
+ job_retry: Optional[retries.Retry] = DEFAULT_JOB_RETRY,
) -> Union["RowIterator", _EmptyRowIterator]:
"""Start the job and wait for it to complete and get the result.
@@ -1525,7 +1534,12 @@ def result( # type: ignore # (complaints about the overloaded signature)
provided and the job is not retryable.
"""
if self.dry_run:
- return _EmptyRowIterator()
+ return _EmptyRowIterator(
+ project=self.project,
+ location=self.location,
+ # Intentionally omit job_id and query_id since this doesn't
+ # actually correspond to a finished query job.
+ )
try:
retry_do_query = getattr(self, "_retry_do_query", None)
if retry_do_query is not None:
@@ -1572,7 +1586,8 @@ def do_get_result():
# Since the job could already be "done" (e.g. got a finished job
# via client.get_job), the superclass call to done() might not
# set the self._query_results cache.
- self._reload_query_results(retry=retry, timeout=timeout)
+ if self._query_results is None or not self._query_results.complete:
+ self._reload_query_results(retry=retry, timeout=timeout)
if retry_do_query is not None and job_retry is not None:
do_get_result = job_retry(do_get_result)
@@ -1594,7 +1609,21 @@ def do_get_result():
# indicate success and avoid calling tabledata.list on a table which
# can't be read (such as a view table).
if self._query_results.total_rows is None:
- return _EmptyRowIterator()
+ return _EmptyRowIterator(
+ location=self.location,
+ project=self.project,
+ job_id=self.job_id,
+ query_id=self.query_id,
+ )
+
+ # We know that there's at least 1 row, so only treat the response from
+ # jobs.getQueryResults / jobs.query as the first page of the
+ # RowIterator response if there are any rows in it. This prevents us
+ # from stopping the iteration early because we're missing rows and
+ # there's no next page token.
+ first_page_response = self._query_results._properties
+ if "rows" not in first_page_response:
+ first_page_response = None
rows = self._client._list_rows_from_query_results(
self.job_id,
@@ -1608,6 +1637,8 @@ def do_get_result():
start_index=start_index,
retry=retry,
timeout=timeout,
+ query_id=self.query_id,
+ first_page_response=first_page_response,
)
rows._preserve_order = _contains_order_by(self.query)
return rows
diff --git a/google/cloud/bigquery/query.py b/google/cloud/bigquery/query.py
index 944ad884e..43591c648 100644
--- a/google/cloud/bigquery/query.py
+++ b/google/cloud/bigquery/query.py
@@ -911,6 +911,26 @@ def job_id(self):
"""
return self._properties.get("jobReference", {}).get("jobId")
+ @property
+ def location(self):
+ """Location of the query job these results are from.
+
+ See:
+ https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.job_reference
+
+ Returns:
+            str: Location where the query job runs.
+ """
+ return self._properties.get("jobReference", {}).get("location")
+
+ @property
+ def query_id(self) -> Optional[str]:
+ """[Preview] ID of a completed query.
+
+ This ID is auto-generated and not guaranteed to be populated.
+ """
+ return self._properties.get("queryId")
+
@property
def page_token(self):
"""Token for fetching next batch of results.
@@ -997,14 +1017,6 @@ def _set_properties(self, api_response):
Args:
api_response (Dict): Response returned from an API call
"""
- job_id_present = (
- "jobReference" in api_response
- and "jobId" in api_response["jobReference"]
- and "projectId" in api_response["jobReference"]
- )
- if not job_id_present:
- raise ValueError("QueryResult requires a job reference")
-
self._properties.clear()
self._properties.update(copy.deepcopy(api_response))
diff --git a/google/cloud/bigquery/retry.py b/google/cloud/bigquery/retry.py
index d0830ed13..b01c0662c 100644
--- a/google/cloud/bigquery/retry.py
+++ b/google/cloud/bigquery/retry.py
@@ -34,7 +34,12 @@
auth_exceptions.TransportError,
)
-_DEFAULT_JOB_DEADLINE = 60.0 * 10.0 # seconds
+_DEFAULT_RETRY_DEADLINE = 10.0 * 60.0 # 10 minutes
+
+# Allow for a few retries after the API request times out. This is relevant for
+# rateLimitExceeded errors, which can be raised either by the Google load
+# balancer or the BigQuery job server.
+_DEFAULT_JOB_DEADLINE = 3.0 * _DEFAULT_RETRY_DEADLINE
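Concretely: DEFAULT_RETRY keeps retrying a single API call for up to 600 seconds, while the job-level deadline is 3 * 600 s = 1800 s (30 minutes), which leaves room for a couple of query re-issues after an API-level timeout.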
def _should_retry(exc):
@@ -51,7 +56,7 @@ def _should_retry(exc):
return reason in _RETRYABLE_REASONS
-DEFAULT_RETRY = retry.Retry(predicate=_should_retry, deadline=600.0)
+DEFAULT_RETRY = retry.Retry(predicate=_should_retry, deadline=_DEFAULT_RETRY_DEADLINE)
"""The default retry object.
Any method with a ``retry`` parameter will be retried automatically,
diff --git a/google/cloud/bigquery/routine/routine.py b/google/cloud/bigquery/routine/routine.py
index ef33d507e..83cb6362d 100644
--- a/google/cloud/bigquery/routine/routine.py
+++ b/google/cloud/bigquery/routine/routine.py
@@ -68,6 +68,7 @@ class Routine(object):
"description": "description",
"determinism_level": "determinismLevel",
"remote_function_options": "remoteFunctionOptions",
+ "data_governance_type": "dataGovernanceType",
}
def __init__(self, routine_ref, **kwargs) -> None:
@@ -300,8 +301,8 @@ def determinism_level(self, value):
@property
def remote_function_options(self):
- """Optional[google.cloud.bigquery.routine.RemoteFunctionOptions]: Configures remote function
- options for a routine.
+ """Optional[google.cloud.bigquery.routine.RemoteFunctionOptions]:
+ Configures remote function options for a routine.
Raises:
ValueError:
@@ -329,6 +330,25 @@ def remote_function_options(self, value):
self._PROPERTY_TO_API_FIELD["remote_function_options"]
] = api_repr
+ @property
+ def data_governance_type(self):
+ """Optional[str]: If set to ``DATA_MASKING``, the function is validated
+ and made available as a masking function.
+
+ Raises:
+ ValueError:
+ If the value is not :data:`string` or :data:`None`.
+ """
+ return self._properties.get(self._PROPERTY_TO_API_FIELD["data_governance_type"])
+
+ @data_governance_type.setter
+ def data_governance_type(self, value):
+ if value is not None and not isinstance(value, str):
+ raise ValueError(
+ "invalid data_governance_type, must be a string or `None`."
+ )
+ self._properties[self._PROPERTY_TO_API_FIELD["data_governance_type"]] = value
+
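For example (the routine ID is a placeholder):

    from google.cloud import bigquery

    routine = bigquery.Routine("my-project.my_dataset.mask_ssn")
    routine.data_governance_type = "DATA_MASKING"  # exposed as a masking function once updated
    # routine.data_governance_type = 123  # would raise ValueError (must be str or None)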
@classmethod
def from_api_repr(cls, resource: dict) -> "Routine":
"""Factory: construct a routine given its API representation.
diff --git a/google/cloud/bigquery/table.py b/google/cloud/bigquery/table.py
index dcba10428..70e601714 100644
--- a/google/cloud/bigquery/table.py
+++ b/google/cloud/bigquery/table.py
@@ -100,6 +100,10 @@
"because the necessary `__from_arrow__` attribute is missing."
)
+# What fraction of the total rows must already be downloaded for us to skip
+# calling the BQ Storage API?
+ALMOST_COMPLETELY_CACHED_RATIO = 0.333
+
def _reference_getter(table):
"""A :class:`~google.cloud.bigquery.table.TableReference` pointing to
@@ -1558,6 +1562,10 @@ def __init__(
selected_fields=None,
total_rows=None,
first_page_response=None,
+ location: Optional[str] = None,
+ job_id: Optional[str] = None,
+ query_id: Optional[str] = None,
+ project: Optional[str] = None,
):
super(RowIterator, self).__init__(
client,
@@ -1575,23 +1583,77 @@ def __init__(
self._field_to_index = _helpers._field_to_index_mapping(schema)
self._page_size = page_size
self._preserve_order = False
- self._project = client.project if client is not None else None
self._schema = schema
self._selected_fields = selected_fields
self._table = table
self._total_rows = total_rows
self._first_page_response = first_page_response
+ self._location = location
+ self._job_id = job_id
+ self._query_id = query_id
+ self._project = project
+
+ @property
+        """GCP project ID that the BigQuery API bills to (if applicable)."""
+ """GCP Project ID where BQ API will bill to (if applicable)."""
+ client = self.client
+ return client.project if client is not None else None
+
+ @property
+ def job_id(self) -> Optional[str]:
+ """ID of the query job (if applicable).
+
+ To get the job metadata, call
+ ``job = client.get_job(rows.job_id, location=rows.location)``.
+ """
+ return self._job_id
+
+ @property
+ def location(self) -> Optional[str]:
+ """Location where the query executed (if applicable).
+
+ See: https://cloud.google.com/bigquery/docs/locations
+ """
+ return self._location
+
+ @property
+ def project(self) -> Optional[str]:
+ """GCP Project ID where these rows are read from."""
+ return self._project
+
+ @property
+ def query_id(self) -> Optional[str]:
+ """[Preview] ID of a completed query.
- def _is_completely_cached(self):
+ This ID is auto-generated and not guaranteed to be populated.
+ """
+ return self._query_id
+
+    def _is_almost_completely_cached(self):
"""Check if all or nearly all results are already cached.
This is useful to know, because we can avoid alternative download
mechanisms.
"""
- if self._first_page_response is None or self.next_page_token:
+ if self._first_page_response is None:
return False
- return self._first_page_response.get(self._next_token) is None
+ total_cached_rows = len(self._first_page_response.get(self._items_key, []))
+ if self.max_results is not None and total_cached_rows >= self.max_results:
+ return True
+
+ if (
+ self.next_page_token is None
+ and self._first_page_response.get(self._next_token) is None
+ ):
+ return True
+
+ if self._total_rows is not None:
+ almost_completely = self._total_rows * ALMOST_COMPLETELY_CACHED_RATIO
+ if total_cached_rows >= almost_completely:
+ return True
+
+ return False
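A worked example of the heuristic with made-up numbers: if the first page already contains 400 rows and totalRows reports 1000, then 400 >= 1000 * 0.333, so the iterator treats the results as almost completely cached and keeps paging through the REST API instead of creating a BQ Storage read session.

    print(400 >= 1000 * ALMOST_COMPLETELY_CACHED_RATIO)  # True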
def _validate_bqstorage(self, bqstorage_client, create_bqstorage_client):
"""Returns True if the BigQuery Storage API can be used.
@@ -1604,7 +1666,14 @@ def _validate_bqstorage(self, bqstorage_client, create_bqstorage_client):
if not using_bqstorage_api:
return False
- if self._is_completely_cached():
+ if self._table is None:
+ return False
+
+ # The developer is manually paging through results if this is set.
+ if self.next_page_token is not None:
+ return False
+
+ if self._is_almost_completely_cached():
return False
if self.max_results is not None:
@@ -1628,7 +1697,15 @@ def _get_next_page_response(self):
The parsed JSON response of the next page's contents.
"""
if self._first_page_response:
- response = self._first_page_response
+ rows = self._first_page_response.get(self._items_key, [])[
+ : self.max_results
+ ]
+ response = {
+ self._items_key: rows,
+ }
+ if self._next_token in self._first_page_response:
+ response[self._next_token] = self._first_page_response[self._next_token]
+
self._first_page_response = None
return response
@@ -1723,7 +1800,7 @@ def to_arrow_iterable(
bqstorage_download = functools.partial(
_pandas_helpers.download_arrow_bqstorage,
- self._project,
+ self._billing_project,
self._table,
bqstorage_client,
preserve_order=self._preserve_order,
@@ -1903,7 +1980,7 @@ def to_dataframe_iterable(
column_names = [field.name for field in self._schema]
bqstorage_download = functools.partial(
_pandas_helpers.download_dataframe_bqstorage,
- self._project,
+ self._billing_project,
self._table,
bqstorage_client,
column_names,
@@ -2920,9 +2997,9 @@ def _rows_page_start(iterator, page, response):
page._columns = _row_iterator_page_columns(iterator._schema, response)
total_rows = response.get("totalRows")
+ # Don't reset total_rows if it's not present in the next API response.
if total_rows is not None:
- total_rows = int(total_rows)
- iterator._total_rows = total_rows
+ iterator._total_rows = int(total_rows)
# pylint: enable=unused-argument
diff --git a/google/cloud/bigquery/version.py b/google/cloud/bigquery/version.py
index ee029aced..7d9a17e98 100644
--- a/google/cloud/bigquery/version.py
+++ b/google/cloud/bigquery/version.py
@@ -12,4 +12,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-__version__ = "3.13.0"
+__version__ = "3.14.0"
diff --git a/noxfile.py b/noxfile.py
index a2b7a6843..41492c7f0 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -37,8 +37,8 @@
)
DEFAULT_PYTHON_VERSION = "3.8"
-SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.11"]
-UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"]
+SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.11", "3.12"]
+UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]
CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
# 'docfx' is excluded since it only needs to run in 'docs-presubmit'
@@ -81,7 +81,7 @@ def default(session, install_extras=True):
constraints_path,
)
- if install_extras and session.python == "3.11":
+ if install_extras and session.python in ["3.11", "3.12"]:
install_target = ".[bqstorage,ipywidgets,pandas,tqdm,opentelemetry]"
elif install_extras:
install_target = ".[all]"
@@ -137,7 +137,7 @@ def mypy(session):
"types-requests",
"types-setuptools",
)
- session.run("mypy", "google/cloud", "--show-traceback")
+ session.run("mypy", "-p", "google", "--show-traceback")
@nox.session(python=DEFAULT_PYTHON_VERSION)
@@ -149,7 +149,8 @@ def pytype(session):
session.install("attrs==20.3.0")
session.install("-e", ".[all]")
session.install(PYTYPE_VERSION)
- session.run("pytype")
+ # See https://github.com/google/pytype/issues/464
+ session.run("pytype", "-P", ".", "google/cloud/bigquery")
@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS)
@@ -186,26 +187,33 @@ def system(session):
# Data Catalog needed for the column ACL test with a real Policy Tag.
session.install("google-cloud-datacatalog", "-c", constraints_path)
- if session.python == "3.11":
+ if session.python in ["3.11", "3.12"]:
extras = "[bqstorage,ipywidgets,pandas,tqdm,opentelemetry]"
else:
extras = "[all]"
session.install("-e", f".{extras}", "-c", constraints_path)
# Run py.test against the system tests.
- session.run("py.test", "--quiet", os.path.join("tests", "system"), *session.posargs)
+ session.run(
+ "py.test",
+ "--quiet",
+ os.path.join("tests", "system"),
+ *session.posargs,
+ )
@nox.session(python=DEFAULT_PYTHON_VERSION)
def mypy_samples(session):
"""Run type checks with mypy."""
- session.install("-e", ".[all]")
-
session.install("pytest")
for requirements_path in CURRENT_DIRECTORY.glob("samples/*/requirements.txt"):
- session.install("-r", requirements_path)
+ session.install("-r", str(requirements_path))
session.install(MYPY_VERSION)
+ # requirements.txt might include this package. Install from source so that
+ # we can author samples with unreleased features.
+ session.install("-e", ".[all]")
+
# Just install the dependencies' type info directly, since "mypy --install-types"
# might require an additional pass.
session.install(
@@ -245,7 +253,7 @@ def snippets(session):
session.install("google-cloud-storage", "-c", constraints_path)
session.install("grpcio", "-c", constraints_path)
- if session.python == "3.11":
+ if session.python in ["3.11", "3.12"]:
extras = "[bqstorage,ipywidgets,pandas,tqdm,opentelemetry]"
else:
extras = "[all]"
@@ -258,8 +266,10 @@ def snippets(session):
session.run(
"py.test",
"samples",
+ "--ignore=samples/desktopapp",
"--ignore=samples/magics",
"--ignore=samples/geography",
+ "--ignore=samples/notebooks",
"--ignore=samples/snippets",
*session.posargs,
)
@@ -427,7 +437,7 @@ def docs(session):
)
-@nox.session(python="3.9")
+@nox.session(python="3.10")
def docfx(session):
"""Build the docfx yaml files for this library."""
diff --git a/samples/client_query.py b/samples/client_query.py
index 4df051ee2..80eac854e 100644
--- a/samples/client_query.py
+++ b/samples/client_query.py
@@ -14,6 +14,9 @@
def client_query() -> None:
+ # TODO(swast): remove once docs in cloud.google.com have been updated to
+ # use samples/snippets/client_query.py
+
# [START bigquery_query]
from google.cloud import bigquery
diff --git a/google/cloud/__init__.py b/samples/desktopapp/__init__.py
similarity index 69%
rename from google/cloud/__init__.py
rename to samples/desktopapp/__init__.py
index 8e60d8439..4fbd93bb2 100644
--- a/google/cloud/__init__.py
+++ b/samples/desktopapp/__init__.py
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2019 Google LLC
+# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,12 +11,3 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-try:
- import pkg_resources
-
- pkg_resources.declare_namespace(__name__)
-except ImportError:
- import pkgutil
-
- __path__ = pkgutil.extend_path(__path__, __name__) # type: ignore
diff --git a/samples/desktopapp/conftest.py b/samples/desktopapp/conftest.py
new file mode 100644
index 000000000..fdc85a852
--- /dev/null
+++ b/samples/desktopapp/conftest.py
@@ -0,0 +1,23 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from google.cloud import bigquery
+import pytest
+
+
+@pytest.fixture
+def bigquery_client_patch(
+ monkeypatch: pytest.MonkeyPatch, bigquery_client: bigquery.Client
+) -> None:
+ monkeypatch.setattr(bigquery, "Client", lambda: bigquery_client)
diff --git a/samples/desktopapp/mypy.ini b/samples/desktopapp/mypy.ini
new file mode 100644
index 000000000..d27b6b599
--- /dev/null
+++ b/samples/desktopapp/mypy.ini
@@ -0,0 +1,8 @@
+[mypy]
+; We require type annotations in all samples.
+strict = True
+exclude = noxfile\.py
+warn_unused_configs = True
+
+[mypy-google.auth,google.oauth2,geojson,google_auth_oauthlib,IPython.*]
+ignore_missing_imports = True
diff --git a/samples/desktopapp/noxfile.py b/samples/desktopapp/noxfile.py
new file mode 100644
index 000000000..3b7135946
--- /dev/null
+++ b/samples/desktopapp/noxfile.py
@@ -0,0 +1,293 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import glob
+import os
+from pathlib import Path
+import sys
+from typing import Callable, Dict, Optional
+
+import nox
+
+
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# DO NOT EDIT THIS FILE EVER!
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# WARNING - WARNING - WARNING - WARNING - WARNING
+
+BLACK_VERSION = "black==22.3.0"
+ISORT_VERSION = "isort==5.10.1"
+
+# Copy `noxfile_config.py` to your directory and modify it instead.
+
+# `TEST_CONFIG` dict is a configuration hook that allows users to
+# modify the test configurations. The values here should be in sync
+# with `noxfile_config.py`. Users will copy `noxfile_config.py` into
+# their directory and modify it.
+
+TEST_CONFIG = {
+ # You can opt out from the test for specific Python versions.
+ "ignored_versions": [],
+ # Old samples are opted out of enforcing Python type hints
+ # All new samples should feature them
+ "enforce_type_hints": False,
+ # An envvar key for determining the project id to use. Change it
+ # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
+ # build specific Cloud project. You can also use your own string
+ # to use your own Cloud project.
+ "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
+ # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
+ # If you need to use a specific version of pip,
+ # change pip_version_override to the string representation
+ # of the version number, for example, "20.2.4"
+ "pip_version_override": None,
+ # A dictionary you want to inject into your test. Don't put any
+ # secrets here. These values will override predefined values.
+ "envs": {},
+}
+
+
+try:
+ # Ensure we can import noxfile_config in the project's directory.
+ sys.path.append(".")
+ from noxfile_config import TEST_CONFIG_OVERRIDE
+except ImportError as e:
+ print("No user noxfile_config found: detail: {}".format(e))
+ TEST_CONFIG_OVERRIDE = {}
+
+# Update the TEST_CONFIG with the user supplied values.
+TEST_CONFIG.update(TEST_CONFIG_OVERRIDE)
+
+
+def get_pytest_env_vars() -> Dict[str, str]:
+ """Returns a dict for pytest invocation."""
+ ret = {}
+
+ # Override the GCLOUD_PROJECT and the alias.
+ env_key = TEST_CONFIG["gcloud_project_env"]
+ # This should error out if not set.
+ ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key]
+
+ # Apply user supplied envs.
+ ret.update(TEST_CONFIG["envs"])
+ return ret
+
+
+# DO NOT EDIT - automatically generated.
+# All versions used to test samples.
+ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]
+
+# Any default versions that should be ignored.
+IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"]
+
+TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS])
+
+INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in (
+ "True",
+ "true",
+)
+
+# Error if a python version is missing
+nox.options.error_on_missing_interpreters = True
+
+#
+# Style Checks
+#
+
+
+# Linting with flake8.
+#
+# We ignore the following rules:
+# E203: whitespace before ‘:’
+# E266: too many leading ‘#’ for block comment
+# E501: line too long
+# I202: Additional newline in a section of imports
+#
+# We also need to specify the rules which are ignored by default:
+# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121']
+FLAKE8_COMMON_ARGS = [
+ "--show-source",
+ "--builtin=gettext",
+ "--max-complexity=20",
+ "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py",
+ "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202",
+ "--max-line-length=88",
+]
+
+
+@nox.session
+def lint(session: nox.sessions.Session) -> None:
+ if not TEST_CONFIG["enforce_type_hints"]:
+ session.install("flake8")
+ else:
+ session.install("flake8", "flake8-annotations")
+
+ args = FLAKE8_COMMON_ARGS + [
+ ".",
+ ]
+ session.run("flake8", *args)
+
+
+#
+# Black
+#
+
+
+@nox.session
+def blacken(session: nox.sessions.Session) -> None:
+ """Run black. Format code to uniform standard."""
+ session.install(BLACK_VERSION)
+ python_files = [path for path in os.listdir(".") if path.endswith(".py")]
+
+ session.run("black", *python_files)
+
+
+#
+# format = isort + black
+#
+
+
+@nox.session
+def format(session: nox.sessions.Session) -> None:
+ """
+ Run isort to sort imports. Then run black
+ to format code to uniform standard.
+ """
+ session.install(BLACK_VERSION, ISORT_VERSION)
+ python_files = [path for path in os.listdir(".") if path.endswith(".py")]
+
+ # Use the --fss option to sort imports using strict alphabetical order.
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections
+ session.run("isort", "--fss", *python_files)
+ session.run("black", *python_files)
+
+
+#
+# Sample Tests
+#
+
+
+PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"]
+
+
+def _session_tests(
+ session: nox.sessions.Session, post_install: Callable = None
+) -> None:
+ # check for presence of tests
+ test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob(
+ "**/test_*.py", recursive=True
+ )
+ test_list.extend(glob.glob("**/tests", recursive=True))
+
+ if len(test_list) == 0:
+ print("No tests found, skipping directory.")
+ return
+
+ if TEST_CONFIG["pip_version_override"]:
+ pip_version = TEST_CONFIG["pip_version_override"]
+ session.install(f"pip=={pip_version}")
+ """Runs py.test for a particular project."""
+ concurrent_args = []
+ if os.path.exists("requirements.txt"):
+ if os.path.exists("constraints.txt"):
+ session.install("-r", "requirements.txt", "-c", "constraints.txt")
+ else:
+ session.install("-r", "requirements.txt")
+ with open("requirements.txt") as rfile:
+ packages = rfile.read()
+
+ if os.path.exists("requirements-test.txt"):
+ if os.path.exists("constraints-test.txt"):
+ session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt")
+ else:
+ session.install("-r", "requirements-test.txt")
+ with open("requirements-test.txt") as rtfile:
+ packages += rtfile.read()
+
+ if INSTALL_LIBRARY_FROM_SOURCE:
+ session.install("-e", _get_repo_root())
+
+ if post_install:
+ post_install(session)
+
+ if "pytest-parallel" in packages:
+ concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"])
+ elif "pytest-xdist" in packages:
+ concurrent_args.extend(["-n", "auto"])
+
+ session.run(
+ "pytest",
+ *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args),
+ # Pytest will return 5 when no tests are collected. This can happen
+ # on travis where slow and flaky tests are excluded.
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html
+ success_codes=[0, 5],
+ env=get_pytest_env_vars(),
+ )
+
+
+@nox.session(python=ALL_VERSIONS)
+def py(session: nox.sessions.Session) -> None:
+ """Runs py.test for a sample using the specified version of Python."""
+ if session.python in TESTED_VERSIONS:
+ _session_tests(session)
+ else:
+ session.skip(
+ "SKIPPED: {} tests are disabled for this sample.".format(session.python)
+ )
+
+
+#
+# Readmegen
+#
+
+
+def _get_repo_root() -> Optional[str]:
+ """Returns the root folder of the project."""
+ # Get root of this repository. Assume we don't have directories nested deeper than 10 items.
+ p = Path(os.getcwd())
+ for i in range(10):
+ if p is None:
+ break
+ if Path(p / ".git").exists():
+ return str(p)
+ # .git is not available in repos cloned via Cloud Build
+ # setup.py is always in the library's root, so use that instead
+ # https://github.com/googleapis/synthtool/issues/792
+ if Path(p / "setup.py").exists():
+ return str(p)
+ p = p.parent
+ raise Exception("Unable to detect repository root.")
+
+
+GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")])
+
+
+@nox.session
+@nox.parametrize("path", GENERATED_READMES)
+def readmegen(session: nox.sessions.Session, path: str) -> None:
+ """(Re-)generates the readme for a sample."""
+ session.install("jinja2", "pyyaml")
+ dir_ = os.path.dirname(path)
+
+ if os.path.exists(os.path.join(dir_, "requirements.txt")):
+ session.install("-r", os.path.join(dir_, "requirements.txt"))
+
+ in_file = os.path.join(dir_, "README.rst.in")
+ session.run(
+ "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file
+ )
diff --git a/samples/desktopapp/noxfile_config.py b/samples/desktopapp/noxfile_config.py
new file mode 100644
index 000000000..315bd5be8
--- /dev/null
+++ b/samples/desktopapp/noxfile_config.py
@@ -0,0 +1,40 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Default TEST_CONFIG_OVERRIDE for python repos.
+
+# You can copy this file into your directory, then it will be imported from
+# the noxfile.py.
+
+# The source of truth:
+# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/noxfile_config.py
+
+TEST_CONFIG_OVERRIDE = {
+ # You can opt out from the test for specific Python versions.
+ "ignored_versions": [
+ "2.7",
+ # TODO: Enable 3.10 once there is a geopandas/fiona release.
+ # https://github.com/Toblerity/Fiona/issues/1043
+ "3.10",
+ ],
+ # An envvar key for determining the project id to use. Change it
+    # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in to using a
+ # build specific Cloud project. You can also use your own string
+ # to use your own Cloud project.
+ "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
+ # "gcloud_project_env": "BUILD_SPECIFIC_GCLOUD_PROJECT",
+ # A dictionary you want to inject into your test. Don't put any
+ # secrets here. These values will override predefined values.
+ "envs": {},
+}
diff --git a/samples/desktopapp/requirements-test.txt b/samples/desktopapp/requirements-test.txt
new file mode 100644
index 000000000..514f09705
--- /dev/null
+++ b/samples/desktopapp/requirements-test.txt
@@ -0,0 +1,3 @@
+google-cloud-testutils==1.3.3
+pytest==7.4.0
+mock==5.1.0
diff --git a/samples/desktopapp/requirements.txt b/samples/desktopapp/requirements.txt
new file mode 100644
index 000000000..a5b3ad130
--- /dev/null
+++ b/samples/desktopapp/requirements.txt
@@ -0,0 +1,2 @@
+google-cloud-bigquery==3.11.4
+google-auth-oauthlib==1.0.0
diff --git a/samples/snippets/user_credentials.py b/samples/desktopapp/user_credentials.py
similarity index 100%
rename from samples/snippets/user_credentials.py
rename to samples/desktopapp/user_credentials.py
diff --git a/samples/snippets/user_credentials_test.py b/samples/desktopapp/user_credentials_test.py
similarity index 96%
rename from samples/snippets/user_credentials_test.py
rename to samples/desktopapp/user_credentials_test.py
index 8448187de..baa9e33f1 100644
--- a/samples/snippets/user_credentials_test.py
+++ b/samples/desktopapp/user_credentials_test.py
@@ -19,7 +19,7 @@
import mock
import pytest
-from user_credentials import main # type: ignore
+from .user_credentials import main # type: ignore
PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"]
diff --git a/samples/geography/noxfile.py b/samples/geography/noxfile.py
index 1224cbe21..3b7135946 100644
--- a/samples/geography/noxfile.py
+++ b/samples/geography/noxfile.py
@@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]:
# DO NOT EDIT - automatically generated.
# All versions used to test samples.
-ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"]
+ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]
# Any default versions that should be ignored.
IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"]
diff --git a/samples/geography/requirements.txt b/samples/geography/requirements.txt
index 9bc6ee32c..d6cea7ec5 100644
--- a/samples/geography/requirements.txt
+++ b/samples/geography/requirements.txt
@@ -7,10 +7,11 @@ click-plugins==1.1.1
cligj==0.7.2
dataclasses==0.8; python_version < '3.7'
db-dtypes==1.1.1
-Fiona==1.9.4.post1
-geojson==3.0.1
+Fiona==1.9.5
+geojson==3.1.0
geopandas===0.10.2; python_version == '3.7'
-geopandas==0.13.2; python_version >= '3.8'
+geopandas==0.13.2; python_version == '3.8'
+geopandas==0.14.1; python_version >= '3.9'
google-api-core==2.11.1
google-auth==2.22.0
google-cloud-bigquery==3.11.4
@@ -19,16 +20,18 @@ google-cloud-core==2.3.3
google-crc32c==1.5.0
google-resumable-media==2.5.0
googleapis-common-protos==1.60.0
-grpcio==1.57.0
+grpcio==1.59.0
idna==3.4
-libcst==1.0.1
+libcst==1.0.1; python_version == '3.7'
+libcst==1.1.0; python_version >= '3.8'
munch==4.0.0
mypy-extensions==1.0.0
packaging==23.1
pandas===1.3.5; python_version == '3.7'
pandas==2.0.3; python_version >= '3.8'
proto-plus==1.22.3
-pyarrow==12.0.1
+pyarrow==12.0.1; python_version == '3.7'
+pyarrow==14.0.1; python_version >= '3.8'
pyasn1==0.5.0
pyasn1-modules==0.3.0
pycparser==2.21
@@ -38,7 +41,7 @@ pytz==2023.3
PyYAML==6.0.1
requests==2.31.0
rsa==4.9
-Shapely==2.0.1
+Shapely==2.0.2
six==1.16.0
typing-extensions==4.7.1
typing-inspect==0.9.0
diff --git a/samples/magics/noxfile.py b/samples/magics/noxfile.py
index 1224cbe21..3b7135946 100644
--- a/samples/magics/noxfile.py
+++ b/samples/magics/noxfile.py
@@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]:
# DO NOT EDIT - automatically generated.
# All versions used to test samples.
-ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"]
+ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]
# Any default versions that should be ignored.
IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"]
diff --git a/samples/magics/requirements.txt b/samples/magics/requirements.txt
index c3300ae20..c8f6b2765 100644
--- a/samples/magics/requirements.txt
+++ b/samples/magics/requirements.txt
@@ -1,15 +1,8 @@
db-dtypes==1.1.1
+google-cloud-bigquery==3.11.4
google-cloud-bigquery-storage==2.22.0
-google-auth-oauthlib==1.0.0
-grpcio==1.57.0
-ipywidgets==8.1.0
ipython===7.31.1; python_version == '3.7'
ipython===8.0.1; python_version == '3.8'
ipython==8.14.0; python_version >= '3.9'
-matplotlib===3.5.3; python_version == '3.7'
-matplotlib==3.7.2; python_version >= '3.8'
pandas===1.3.5; python_version == '3.7'
pandas==2.0.3; python_version >= '3.8'
-pyarrow==12.0.1
-pytz==2023.3
-typing-extensions==4.7.1
diff --git a/google/__init__.py b/samples/notebooks/__init__.py
similarity index 69%
rename from google/__init__.py
rename to samples/notebooks/__init__.py
index 8e60d8439..4fbd93bb2 100644
--- a/google/__init__.py
+++ b/samples/notebooks/__init__.py
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2019 Google LLC
+# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,12 +11,3 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-try:
- import pkg_resources
-
- pkg_resources.declare_namespace(__name__)
-except ImportError:
- import pkgutil
-
- __path__ = pkgutil.extend_path(__path__, __name__) # type: ignore
diff --git a/samples/notebooks/conftest.py b/samples/notebooks/conftest.py
new file mode 100644
index 000000000..fdc85a852
--- /dev/null
+++ b/samples/notebooks/conftest.py
@@ -0,0 +1,23 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from google.cloud import bigquery
+import pytest
+
+
+@pytest.fixture
+def bigquery_client_patch(
+ monkeypatch: pytest.MonkeyPatch, bigquery_client: bigquery.Client
+) -> None:
+ monkeypatch.setattr(bigquery, "Client", lambda: bigquery_client)
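
For context, the `bigquery_client_patch` fixture above swaps `bigquery.Client` for a zero-argument factory returning the shared test client, so notebook samples that construct their own client transparently reuse it. A minimal sketch of a test leaning on the fixture; the `bigquery_client` fixture is assumed to come from a shared conftest, so the placeholder below only stands in for it:

```python
import pytest

from google.cloud import bigquery


@pytest.fixture
def bigquery_client() -> bigquery.Client:
    # Placeholder for the session-scoped fixture assumed to exist in a
    # shared conftest; it is not part of this diff.
    return bigquery.Client()


def test_sample_uses_patched_client(bigquery_client_patch: None) -> None:
    # Any bigquery.Client() constructed inside the sample now resolves to
    # the test client, so the sample needs no test-specific wiring.
    client = bigquery.Client()
    rows = list(client.query_and_wait("SELECT 1 AS x"))
    assert rows[0].x == 1
```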
diff --git a/samples/snippets/jupyter_tutorial_test.py b/samples/notebooks/jupyter_tutorial_test.py
similarity index 100%
rename from samples/snippets/jupyter_tutorial_test.py
rename to samples/notebooks/jupyter_tutorial_test.py
diff --git a/samples/notebooks/mypy.ini b/samples/notebooks/mypy.ini
new file mode 100644
index 000000000..dea60237b
--- /dev/null
+++ b/samples/notebooks/mypy.ini
@@ -0,0 +1,8 @@
+[mypy]
+; We require type annotations in all samples.
+strict = True
+exclude = noxfile\.py
+warn_unused_configs = True
+
+[mypy-IPython.*,nox,noxfile_config,pandas]
+ignore_missing_imports = True
\ No newline at end of file
diff --git a/samples/notebooks/noxfile.py b/samples/notebooks/noxfile.py
new file mode 100644
index 000000000..3b7135946
--- /dev/null
+++ b/samples/notebooks/noxfile.py
@@ -0,0 +1,293 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import glob
+import os
+from pathlib import Path
+import sys
+from typing import Callable, Dict, Optional
+
+import nox
+
+
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# DO NOT EDIT THIS FILE EVER!
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# WARNING - WARNING - WARNING - WARNING - WARNING
+
+BLACK_VERSION = "black==22.3.0"
+ISORT_VERSION = "isort==5.10.1"
+
+# Copy `noxfile_config.py` to your directory and modify it instead.
+
+# `TEST_CONFIG` dict is a configuration hook that allows users to
+# modify the test configurations. The values here should be in sync
+# with `noxfile_config.py`. Users will copy `noxfile_config.py` into
+# their directory and modify it.
+
+TEST_CONFIG = {
+ # You can opt out from the test for specific Python versions.
+ "ignored_versions": [],
+ # Old samples are opted out of enforcing Python type hints
+ # All new samples should feature them
+ "enforce_type_hints": False,
+ # An envvar key for determining the project id to use. Change it
+    # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in to using a
+ # build specific Cloud project. You can also use your own string
+ # to use your own Cloud project.
+ "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
+ # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
+ # If you need to use a specific version of pip,
+ # change pip_version_override to the string representation
+ # of the version number, for example, "20.2.4"
+ "pip_version_override": None,
+ # A dictionary you want to inject into your test. Don't put any
+ # secrets here. These values will override predefined values.
+ "envs": {},
+}
+
+
+try:
+ # Ensure we can import noxfile_config in the project's directory.
+ sys.path.append(".")
+ from noxfile_config import TEST_CONFIG_OVERRIDE
+except ImportError as e:
+ print("No user noxfile_config found: detail: {}".format(e))
+ TEST_CONFIG_OVERRIDE = {}
+
+# Update the TEST_CONFIG with the user supplied values.
+TEST_CONFIG.update(TEST_CONFIG_OVERRIDE)
+
+
+def get_pytest_env_vars() -> Dict[str, str]:
+ """Returns a dict for pytest invocation."""
+ ret = {}
+
+ # Override the GCLOUD_PROJECT and the alias.
+ env_key = TEST_CONFIG["gcloud_project_env"]
+ # This should error out if not set.
+ ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key]
+
+ # Apply user supplied envs.
+ ret.update(TEST_CONFIG["envs"])
+ return ret
+
+
+# DO NOT EDIT - automatically generated.
+# All versions used to test samples.
+ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]
+
+# Any default versions that should be ignored.
+IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"]
+
+TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS])
+
+INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in (
+ "True",
+ "true",
+)
+
+# Error if a python version is missing
+nox.options.error_on_missing_interpreters = True
+
+#
+# Style Checks
+#
+
+
+# Linting with flake8.
+#
+# We ignore the following rules:
+# E203: whitespace before ‘:’
+# E266: too many leading ‘#’ for block comment
+# E501: line too long
+# I202: Additional newline in a section of imports
+#
+# We also need to specify the rules which are ignored by default:
+# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121']
+FLAKE8_COMMON_ARGS = [
+ "--show-source",
+ "--builtin=gettext",
+ "--max-complexity=20",
+ "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py",
+ "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202",
+ "--max-line-length=88",
+]
+
+
+@nox.session
+def lint(session: nox.sessions.Session) -> None:
+ if not TEST_CONFIG["enforce_type_hints"]:
+ session.install("flake8")
+ else:
+ session.install("flake8", "flake8-annotations")
+
+ args = FLAKE8_COMMON_ARGS + [
+ ".",
+ ]
+ session.run("flake8", *args)
+
+
+#
+# Black
+#
+
+
+@nox.session
+def blacken(session: nox.sessions.Session) -> None:
+ """Run black. Format code to uniform standard."""
+ session.install(BLACK_VERSION)
+ python_files = [path for path in os.listdir(".") if path.endswith(".py")]
+
+ session.run("black", *python_files)
+
+
+#
+# format = isort + black
+#
+
+
+@nox.session
+def format(session: nox.sessions.Session) -> None:
+ """
+ Run isort to sort imports. Then run black
+ to format code to uniform standard.
+ """
+ session.install(BLACK_VERSION, ISORT_VERSION)
+ python_files = [path for path in os.listdir(".") if path.endswith(".py")]
+
+ # Use the --fss option to sort imports using strict alphabetical order.
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections
+ session.run("isort", "--fss", *python_files)
+ session.run("black", *python_files)
+
+
+#
+# Sample Tests
+#
+
+
+PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"]
+
+
+def _session_tests(
+ session: nox.sessions.Session, post_install: Callable = None
+) -> None:
+ # check for presence of tests
+ test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob(
+ "**/test_*.py", recursive=True
+ )
+ test_list.extend(glob.glob("**/tests", recursive=True))
+
+ if len(test_list) == 0:
+ print("No tests found, skipping directory.")
+ return
+
+ if TEST_CONFIG["pip_version_override"]:
+ pip_version = TEST_CONFIG["pip_version_override"]
+ session.install(f"pip=={pip_version}")
+ """Runs py.test for a particular project."""
+ concurrent_args = []
+ if os.path.exists("requirements.txt"):
+ if os.path.exists("constraints.txt"):
+ session.install("-r", "requirements.txt", "-c", "constraints.txt")
+ else:
+ session.install("-r", "requirements.txt")
+ with open("requirements.txt") as rfile:
+ packages = rfile.read()
+
+ if os.path.exists("requirements-test.txt"):
+ if os.path.exists("constraints-test.txt"):
+ session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt")
+ else:
+ session.install("-r", "requirements-test.txt")
+ with open("requirements-test.txt") as rtfile:
+ packages += rtfile.read()
+
+ if INSTALL_LIBRARY_FROM_SOURCE:
+ session.install("-e", _get_repo_root())
+
+ if post_install:
+ post_install(session)
+
+ if "pytest-parallel" in packages:
+ concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"])
+ elif "pytest-xdist" in packages:
+ concurrent_args.extend(["-n", "auto"])
+
+ session.run(
+ "pytest",
+ *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args),
+ # Pytest will return 5 when no tests are collected. This can happen
+        # on Travis CI, where slow and flaky tests are excluded.
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html
+ success_codes=[0, 5],
+ env=get_pytest_env_vars(),
+ )
+
+
+@nox.session(python=ALL_VERSIONS)
+def py(session: nox.sessions.Session) -> None:
+ """Runs py.test for a sample using the specified version of Python."""
+ if session.python in TESTED_VERSIONS:
+ _session_tests(session)
+ else:
+ session.skip(
+ "SKIPPED: {} tests are disabled for this sample.".format(session.python)
+ )
+
+
+#
+# Readmegen
+#
+
+
+def _get_repo_root() -> Optional[str]:
+ """Returns the root folder of the project."""
+    # Get the root of this repository. Assume it is no more than 10 directory levels above the current directory.
+ p = Path(os.getcwd())
+ for i in range(10):
+ if p is None:
+ break
+ if Path(p / ".git").exists():
+ return str(p)
+ # .git is not available in repos cloned via Cloud Build
+ # setup.py is always in the library's root, so use that instead
+ # https://github.com/googleapis/synthtool/issues/792
+ if Path(p / "setup.py").exists():
+ return str(p)
+ p = p.parent
+ raise Exception("Unable to detect repository root.")
+
+
+GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")])
+
+
+@nox.session
+@nox.parametrize("path", GENERATED_READMES)
+def readmegen(session: nox.sessions.Session, path: str) -> None:
+ """(Re-)generates the readme for a sample."""
+ session.install("jinja2", "pyyaml")
+ dir_ = os.path.dirname(path)
+
+ if os.path.exists(os.path.join(dir_, "requirements.txt")):
+ session.install("-r", os.path.join(dir_, "requirements.txt"))
+
+ in_file = os.path.join(dir_, "README.rst.in")
+ session.run(
+ "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file
+ )
diff --git a/samples/notebooks/noxfile_config.py b/samples/notebooks/noxfile_config.py
new file mode 100644
index 000000000..315bd5be8
--- /dev/null
+++ b/samples/notebooks/noxfile_config.py
@@ -0,0 +1,40 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Default TEST_CONFIG_OVERRIDE for python repos.
+
+# You can copy this file into your directory, then it will be imported from
+# the noxfile.py.
+
+# The source of truth:
+# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/noxfile_config.py
+
+TEST_CONFIG_OVERRIDE = {
+ # You can opt out from the test for specific Python versions.
+ "ignored_versions": [
+ "2.7",
+ # TODO: Enable 3.10 once there is a geopandas/fiona release.
+ # https://github.com/Toblerity/Fiona/issues/1043
+ "3.10",
+ ],
+ # An envvar key for determining the project id to use. Change it
+    # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in to using a
+ # build specific Cloud project. You can also use your own string
+ # to use your own Cloud project.
+ "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
+ # "gcloud_project_env": "BUILD_SPECIFIC_GCLOUD_PROJECT",
+ # A dictionary you want to inject into your test. Don't put any
+ # secrets here. These values will override predefined values.
+ "envs": {},
+}
diff --git a/samples/notebooks/requirements-test.txt b/samples/notebooks/requirements-test.txt
new file mode 100644
index 000000000..514f09705
--- /dev/null
+++ b/samples/notebooks/requirements-test.txt
@@ -0,0 +1,3 @@
+google-cloud-testutils==1.3.3
+pytest==7.4.0
+mock==5.1.0
diff --git a/samples/notebooks/requirements.txt b/samples/notebooks/requirements.txt
new file mode 100644
index 000000000..22c46297f
--- /dev/null
+++ b/samples/notebooks/requirements.txt
@@ -0,0 +1,10 @@
+db-dtypes==1.1.1
+google-cloud-bigquery==3.11.4
+google-cloud-bigquery-storage==2.22.0
+ipython===7.31.1; python_version == '3.7'
+ipython===8.0.1; python_version == '3.8'
+ipython==8.14.0; python_version >= '3.9'
+matplotlib===3.5.3; python_version == '3.7'
+matplotlib==3.7.2; python_version >= '3.8'
+pandas===1.3.5; python_version == '3.7'
+pandas==2.0.3; python_version >= '3.8'
diff --git a/samples/snippets/client_query.py b/samples/snippets/client_query.py
new file mode 100644
index 000000000..ccae2e8bd
--- /dev/null
+++ b/samples/snippets/client_query.py
@@ -0,0 +1,37 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def client_query() -> None:
+ # [START bigquery_query]
+ from google.cloud import bigquery
+
+ # Construct a BigQuery client object.
+ client = bigquery.Client()
+
+ query = """
+ SELECT name, SUM(number) as total_people
+ FROM `bigquery-public-data.usa_names.usa_1910_2013`
+ WHERE state = 'TX'
+ GROUP BY name, state
+ ORDER BY total_people DESC
+ LIMIT 20
+ """
+ rows = client.query_and_wait(query) # Make an API request.
+
+ print("The query data:")
+ for row in rows:
+ # Row values can be accessed by field name or index.
+ print("name={}, count={}".format(row[0], row["total_people"]))
+ # [END bigquery_query]
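
The snippet above relies on `Client.query_and_wait`, which issues the query and blocks until rows are available, returning a row iterator directly. For comparison, a hedged sketch of the older two-step pattern it replaces (per the `_job_helpers` changes later in this diff, `query_and_wait` can also skip creating a full job when the service answers the query immediately):

```python
from google.cloud import bigquery

client = bigquery.Client()

# Two-step form: start a QueryJob, then wait on it explicitly.
job = client.query("SELECT 1 AS x")  # API request that creates the job.
rows = job.result()                  # Blocks until the job finishes.

# One-step form used by the sample.
rows = client.query_and_wait("SELECT 1 AS x")
for row in rows:
    print(row.x)
```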
diff --git a/samples/snippets/client_query_test.py b/samples/snippets/client_query_test.py
new file mode 100644
index 000000000..1bc83a230
--- /dev/null
+++ b/samples/snippets/client_query_test.py
@@ -0,0 +1,38 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import typing
+
+import client_query # type: ignore
+
+if typing.TYPE_CHECKING:
+ import pytest
+
+
+def test_client_query(capsys: "pytest.CaptureFixture[str]") -> None:
+ client_query.client_query()
+ out, _ = capsys.readouterr()
+ assert "The query data:" in out
+ assert "name=James, count=272793" in out
+
+
+def test_client_query_job_optional(
+ capsys: "pytest.CaptureFixture[str]", monkeypatch: "pytest.MonkeyPatch"
+) -> None:
+ monkeypatch.setenv("QUERY_PREVIEW_ENABLED", "true")
+
+ client_query.client_query()
+ out, _ = capsys.readouterr()
+ assert "The query data:" in out
+ assert "name=James, count=272793" in out
diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py
index 1224cbe21..3b7135946 100644
--- a/samples/snippets/noxfile.py
+++ b/samples/snippets/noxfile.py
@@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]:
# DO NOT EDIT - automatically generated.
# All versions used to test samples.
-ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"]
+ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]
# Any default versions that should be ignored.
IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"]
diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt
index da99249d2..f49c7494f 100644
--- a/samples/snippets/requirements.txt
+++ b/samples/snippets/requirements.txt
@@ -1,16 +1 @@
-db-dtypes==1.1.1
-google-cloud-bigquery==3.11.4
-google-cloud-bigquery-storage==2.22.0
-google-auth-oauthlib==1.0.0
-grpcio==1.57.0
-ipywidgets==8.1.0
-ipython===7.31.1; python_version == '3.7'
-ipython===8.0.1; python_version == '3.8'
-ipython==8.14.0; python_version >= '3.9'
-matplotlib===3.5.3; python_version == '3.7'
-matplotlib==3.7.2; python_version >= '3.8'
-pandas===1.3.5; python_version == '3.7'
-pandas==2.0.3; python_version >= '3.8'
-pyarrow==12.0.1
-pytz==2023.3
-typing-extensions==4.7.1
+google-cloud-bigquery==3.11.4
\ No newline at end of file
diff --git a/setup.py b/setup.py
index 4e87b3b84..9fbc91ecb 100644
--- a/setup.py
+++ b/setup.py
@@ -29,20 +29,16 @@
# 'Development Status :: 5 - Production/Stable'
release_status = "Development Status :: 5 - Production/Stable"
dependencies = [
- "grpcio >= 1.47.0, < 2.0dev", # https://github.com/googleapis/python-bigquery/issues/1262
- "grpcio >= 1.49.1, < 2.0dev; python_version>='3.11'",
# NOTE: Maintainers, please do not require google-api-core>=2.x.x
# Until this issue is closed
# https://github.com/googleapis/google-cloud-python/issues/10566
- "google-api-core[grpc] >= 1.31.5, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0",
- "proto-plus >= 1.15.0, <2.0.0dev",
+ "google-api-core >= 1.31.5, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0",
# NOTE: Maintainers, please do not require google-cloud-core>=2.x.x
# Until this issue is closed
# https://github.com/googleapis/google-cloud-python/issues/10566
"google-cloud-core >= 1.6.0, <3.0.0dev",
"google-resumable-media >= 0.6.0, < 3.0dev",
"packaging >= 20.0.0",
- "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", # For the legacy proto-based types.
"python-dateutil >= 2.7.2, <3.0dev",
"requests >= 2.21.0, < 3.0.0dev",
]
@@ -66,6 +62,7 @@
"pandas>=1.1.0",
pyarrow_dependency,
"db-dtypes>=0.3.0,<2.0.0dev",
+ "importlib_metadata>=1.0.0; python_version<'3.8'",
],
"ipywidgets": [
"ipywidgets>=7.7.0",
@@ -82,6 +79,10 @@
"opentelemetry-sdk >= 1.1.0",
"opentelemetry-instrumentation >= 0.20b0",
],
+ "bigquery_v2": [
+ "proto-plus >= 1.15.0, <2.0.0dev",
+ "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", # For the legacy proto-based types.
+ ],
}
all_extras = []
@@ -108,16 +109,10 @@
# benchmarks, etc.
packages = [
package
- for package in setuptools.PEP420PackageFinder.find()
+ for package in setuptools.find_namespace_packages()
if package.startswith("google")
]
-# Determine which namespaces are needed.
-namespaces = ["google"]
-if "google.cloud" in packages:
- namespaces.append("google.cloud")
-
-
setuptools.setup(
name=name,
version=version,
@@ -138,12 +133,12 @@
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
"Operating System :: OS Independent",
"Topic :: Internet",
],
platforms="Posix; MacOS X; Windows",
packages=packages,
- namespace_packages=namespaces,
install_requires=dependencies,
extras_require=extras,
python_requires=">=3.7",
diff --git a/testing/constraints-3.12.txt b/testing/constraints-3.12.txt
new file mode 100644
index 000000000..e69de29bb
diff --git a/tests/system/test_client.py b/tests/system/test_client.py
index 09606590e..7cea8cfa4 100644
--- a/tests/system/test_client.py
+++ b/tests/system/test_client.py
@@ -13,6 +13,7 @@
# limitations under the License.
import base64
+import copy
import csv
import datetime
import decimal
@@ -237,6 +238,22 @@ def test_create_dataset(self):
self.assertTrue(_dataset_exists(dataset))
self.assertEqual(dataset.dataset_id, DATASET_ID)
self.assertEqual(dataset.project, Config.CLIENT.project)
+ self.assertIs(dataset.is_case_insensitive, False)
+
+ def test_create_dataset_case_sensitive(self):
+ DATASET_ID = _make_dataset_id("create_cs_dataset")
+ dataset = self.temp_dataset(DATASET_ID, is_case_insensitive=False)
+ self.assertIs(dataset.is_case_insensitive, False)
+
+ def test_create_dataset_case_insensitive(self):
+ DATASET_ID = _make_dataset_id("create_ci_dataset")
+ dataset = self.temp_dataset(DATASET_ID, is_case_insensitive=True)
+ self.assertIs(dataset.is_case_insensitive, True)
+
+ def test_create_dataset_max_time_travel_hours(self):
+ DATASET_ID = _make_dataset_id("create_ci_dataset")
+ dataset = self.temp_dataset(DATASET_ID, max_time_travel_hours=24 * 2)
+ self.assertEqual(int(dataset.max_time_travel_hours), 24 * 2)
def test_get_dataset(self):
dataset_id = _make_dataset_id("get_dataset")
@@ -278,16 +295,19 @@ def test_update_dataset(self):
self.assertIsNone(dataset.friendly_name)
self.assertIsNone(dataset.description)
self.assertEqual(dataset.labels, {})
+ self.assertIs(dataset.is_case_insensitive, False)
dataset.friendly_name = "Friendly"
dataset.description = "Description"
dataset.labels = {"priority": "high", "color": "blue"}
+ dataset.is_case_insensitive = True
ds2 = Config.CLIENT.update_dataset(
- dataset, ("friendly_name", "description", "labels")
+ dataset, ("friendly_name", "description", "labels", "is_case_insensitive")
)
self.assertEqual(ds2.friendly_name, "Friendly")
self.assertEqual(ds2.description, "Description")
self.assertEqual(ds2.labels, {"priority": "high", "color": "blue"})
+ self.assertIs(ds2.is_case_insensitive, True)
ds2.labels = {
"color": "green", # change
@@ -342,6 +362,48 @@ def test_create_table(self):
self.assertTrue(_table_exists(table))
self.assertEqual(table.table_id, table_id)
+ def test_create_tables_in_case_insensitive_dataset(self):
+ ci_dataset = self.temp_dataset(
+ _make_dataset_id("create_table"), is_case_insensitive=True
+ )
+ table_arg = Table(ci_dataset.table("test_table2"), schema=SCHEMA)
+ tablemc_arg = Table(ci_dataset.table("Test_taBLe2")) # same name, in Mixed Case
+
+ table = helpers.retry_403(Config.CLIENT.create_table)(table_arg)
+ self.to_delete.insert(0, table)
+
+ self.assertTrue(_table_exists(table_arg))
+ self.assertTrue(_table_exists(tablemc_arg))
+ self.assertIs(ci_dataset.is_case_insensitive, True)
+
+ def test_create_tables_in_case_sensitive_dataset(self):
+ ci_dataset = self.temp_dataset(
+ _make_dataset_id("create_table"), is_case_insensitive=False
+ )
+ table_arg = Table(ci_dataset.table("test_table3"), schema=SCHEMA)
+ tablemc_arg = Table(ci_dataset.table("Test_taBLe3")) # same name, in Mixed Case
+
+ table = helpers.retry_403(Config.CLIENT.create_table)(table_arg)
+ self.to_delete.insert(0, table)
+
+ self.assertTrue(_table_exists(table_arg))
+ self.assertFalse(_table_exists(tablemc_arg))
+ self.assertIs(ci_dataset.is_case_insensitive, False)
+
+ def test_create_tables_in_default_sensitivity_dataset(self):
+ dataset = self.temp_dataset(_make_dataset_id("create_table"))
+ table_arg = Table(dataset.table("test_table4"), schema=SCHEMA)
+ tablemc_arg = Table(
+ dataset.table("Test_taBLe4")
+ ) # same name, in MC (Mixed Case)
+
+ table = helpers.retry_403(Config.CLIENT.create_table)(table_arg)
+ self.to_delete.insert(0, table)
+
+ self.assertTrue(_table_exists(table_arg))
+ self.assertFalse(_table_exists(tablemc_arg))
+ self.assertIs(dataset.is_case_insensitive, False)
+
def test_create_table_with_real_custom_policy(self):
from google.cloud.bigquery.schema import PolicyTagList
@@ -2175,6 +2237,41 @@ def test_create_tvf_routine(self):
]
assert result_rows == expected
+ def test_create_routine_w_data_governance(self):
+ routine_name = "routine_with_data_governance"
+ dataset = self.temp_dataset(_make_dataset_id("create_routine"))
+
+ routine = bigquery.Routine(
+ dataset.routine(routine_name),
+ type_="SCALAR_FUNCTION",
+ language="SQL",
+ body="x",
+ arguments=[
+ bigquery.RoutineArgument(
+ name="x",
+ data_type=bigquery.StandardSqlDataType(
+ type_kind=bigquery.StandardSqlTypeNames.INT64
+ ),
+ )
+ ],
+ data_governance_type="DATA_MASKING",
+ return_type=bigquery.StandardSqlDataType(
+ type_kind=bigquery.StandardSqlTypeNames.INT64
+ ),
+ )
+ routine_original = copy.deepcopy(routine)
+
+ client = Config.CLIENT
+ routine_new = client.create_routine(routine)
+
+ assert routine_new.reference == routine_original.reference
+ assert routine_new.type_ == routine_original.type_
+ assert routine_new.language == routine_original.language
+ assert routine_new.body == routine_original.body
+ assert routine_new.arguments == routine_original.arguments
+ assert routine_new.return_type == routine_original.return_type
+ assert routine_new.data_governance_type == routine_original.data_governance_type
+
def test_create_table_rows_fetch_nested_schema(self):
table_name = "test_table"
dataset = self.temp_dataset(_make_dataset_id("create_table_nested_schema"))
@@ -2299,9 +2396,12 @@ def temp_dataset(self, dataset_id, *args, **kwargs):
dataset = Dataset(dataset_ref)
if kwargs.get("location"):
dataset.location = kwargs.get("location")
+ if kwargs.get("max_time_travel_hours"):
+ dataset.max_time_travel_hours = kwargs.get("max_time_travel_hours")
if kwargs.get("default_rounding_mode"):
dataset.default_rounding_mode = kwargs.get("default_rounding_mode")
-
+ if kwargs.get("is_case_insensitive"):
+ dataset.is_case_insensitive = kwargs.get("is_case_insensitive")
dataset = helpers.retry_403(Config.CLIENT.create_dataset)(dataset)
self.to_delete.append(dataset)
return dataset
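
The new system tests above exercise two `Dataset` properties touched by this change, `is_case_insensitive` and `max_time_travel_hours`. A minimal usage sketch with placeholder IDs:

```python
from google.cloud import bigquery

client = bigquery.Client()

dataset = bigquery.Dataset("my-project.my_new_dataset")  # placeholder ID
dataset.is_case_insensitive = True       # table names ignore case
dataset.max_time_travel_hours = 24 * 2   # 48-hour time travel window
dataset = client.create_dataset(dataset)

# Sensitivity can also be changed later, as test_update_dataset does.
dataset.is_case_insensitive = False
dataset = client.update_dataset(dataset, ["is_case_insensitive"])
```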
diff --git a/tests/system/test_pandas.py b/tests/system/test_pandas.py
index a46f8e3df..e93f245c0 100644
--- a/tests/system/test_pandas.py
+++ b/tests/system/test_pandas.py
@@ -23,9 +23,13 @@
import warnings
import google.api_core.retry
-import pkg_resources
import pytest
+try:
+ import importlib.metadata as metadata
+except ImportError:
+ import importlib_metadata as metadata
+
from google.cloud import bigquery
from google.cloud.bigquery import enums
@@ -42,11 +46,9 @@
)
if pandas is not None:
- PANDAS_INSTALLED_VERSION = pkg_resources.get_distribution("pandas").parsed_version
+ PANDAS_INSTALLED_VERSION = metadata.version("pandas")
else:
- PANDAS_INSTALLED_VERSION = pkg_resources.parse_version("0.0.0")
-
-PANDAS_INT64_VERSION = pkg_resources.parse_version("1.0.0")
+ PANDAS_INSTALLED_VERSION = "0.0.0"
class MissingDataError(Exception):
@@ -310,10 +312,7 @@ def test_load_table_from_dataframe_w_automatic_schema(bigquery_client, dataset_i
]
-@pytest.mark.skipif(
- PANDAS_INSTALLED_VERSION < PANDAS_INT64_VERSION,
- reason="Only `pandas version >=1.0.0` is supported",
-)
+@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
def test_load_table_from_dataframe_w_nullable_int64_datatype(
bigquery_client, dataset_id
):
@@ -342,7 +341,7 @@ def test_load_table_from_dataframe_w_nullable_int64_datatype(
@pytest.mark.skipif(
- PANDAS_INSTALLED_VERSION < PANDAS_INT64_VERSION,
+ PANDAS_INSTALLED_VERSION[0:2].startswith("0."),
reason="Only `pandas version >=1.0.0` is supported",
)
def test_load_table_from_dataframe_w_nullable_int64_datatype_automatic_schema(
@@ -428,8 +427,7 @@ def test_load_table_from_dataframe_w_nulls(bigquery_client, dataset_id):
def test_load_table_from_dataframe_w_required(bigquery_client, dataset_id):
- """Test that a DataFrame with required columns can be uploaded if a
- BigQuery schema is specified.
+ """Test that a DataFrame can be uploaded to a table with required columns.
See: https://github.com/googleapis/google-cloud-python/issues/8093
"""
@@ -440,7 +438,6 @@ def test_load_table_from_dataframe_w_required(bigquery_client, dataset_id):
records = [{"name": "Chip", "age": 2}, {"name": "Dale", "age": 3}]
dataframe = pandas.DataFrame(records, columns=["name", "age"])
- job_config = bigquery.LoadJobConfig(schema=table_schema)
table_id = "{}.{}.load_table_from_dataframe_w_required".format(
bigquery_client.project, dataset_id
)
@@ -451,15 +448,50 @@ def test_load_table_from_dataframe_w_required(bigquery_client, dataset_id):
bigquery.Table(table_id, schema=table_schema)
)
- job_config = bigquery.LoadJobConfig(schema=table_schema)
- load_job = bigquery_client.load_table_from_dataframe(
- dataframe, table_id, job_config=job_config
- )
+ load_job = bigquery_client.load_table_from_dataframe(dataframe, table_id)
load_job.result()
table = bigquery_client.get_table(table)
assert tuple(table.schema) == table_schema
assert table.num_rows == 2
+ for field in table.schema:
+ assert field.mode == "REQUIRED"
+
+
+def test_load_table_from_dataframe_w_required_but_local_nulls_fails(
+ bigquery_client, dataset_id
+):
+ """Test that a DataFrame with nulls can't be uploaded to a table with
+ required columns.
+
+ See: https://github.com/googleapis/python-bigquery/issues/1692
+ """
+ table_schema = (
+ bigquery.SchemaField("name", "STRING", mode="REQUIRED"),
+ bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"),
+ )
+
+ records = [
+ {"name": "Chip", "age": 2},
+ {"name": "Dale", "age": 3},
+ {"name": None, "age": None},
+ {"name": "Alvin", "age": 4},
+ ]
+ dataframe = pandas.DataFrame(records, columns=["name", "age"])
+ table_id = (
+ "{}.{}.load_table_from_dataframe_w_required_but_local_nulls_fails".format(
+ bigquery_client.project, dataset_id
+ )
+ )
+
+ # Create the table before loading so that schema mismatch errors are
+ # identified.
+ helpers.retry_403(bigquery_client.create_table)(
+ bigquery.Table(table_id, schema=table_schema)
+ )
+
+ with pytest.raises(google.api_core.exceptions.BadRequest, match="null"):
+ bigquery_client.load_table_from_dataframe(dataframe, table_id).result()
def test_load_table_from_dataframe_w_explicit_schema(bigquery_client, dataset_id):
@@ -1010,9 +1042,7 @@ def test_list_rows_max_results_w_bqstorage(bigquery_client):
assert len(dataframe.index) == 100
-@pytest.mark.skipif(
- PANDAS_INSTALLED_VERSION >= pkg_resources.parse_version("2.0.0"), reason=""
-)
+@pytest.mark.skipif(PANDAS_INSTALLED_VERSION[0:2] not in ["0.", "1."], reason="")
@pytest.mark.parametrize(
("max_results",),
(
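
This file drops `pkg_resources` in favor of `importlib.metadata`, and the skip conditions now compare version strings by prefix. Where a true ordering comparison is still wanted, `packaging.version` (already required via `packaging >= 20.0.0` in setup.py) is the usual replacement; a sketch:

```python
try:
    import importlib.metadata as metadata
except ImportError:  # Python 3.7 fallback, matching the pattern above.
    import importlib_metadata as metadata

from packaging import version

pandas_version = version.parse(metadata.version("pandas"))

# Ordering-aware equivalent of the old pkg_resources-based check.
if pandas_version < version.parse("1.0.0"):
    print("pandas is too old for nullable Int64 round-trips")
```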
diff --git a/tests/unit/job/test_base.py b/tests/unit/job/test_base.py
index a662e92d4..5635d0e32 100644
--- a/tests/unit/job/test_base.py
+++ b/tests/unit/job/test_base.py
@@ -1228,3 +1228,18 @@ def test_labels_setter(self):
job_config = self._make_one()
job_config.labels = labels
self.assertEqual(job_config._properties["labels"], labels)
+
+ def test_job_timeout_ms_raises_valueerror(self):
+        # Confirm that attempting to set a non-integer value raises a ValueError.
+ with pytest.raises(ValueError):
+ job_config = self._make_one()
+ job_config.job_timeout_ms = "WillRaiseError"
+
+ def test_job_timeout_ms(self):
+        # Confirm that the default value is None.
+ job_config = self._make_one()
+ assert job_config.job_timeout_ms is None
+
+ # Confirm that integers get converted to strings.
+ job_config.job_timeout_ms = 5000
+ assert job_config.job_timeout_ms == "5000" # int is converted to string
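
These tests pin down the new `job_timeout_ms` option on the shared job-config base: it defaults to None, rejects non-integer values with ValueError, and reads back as a string. Assuming it is surfaced on the concrete configs through that base, a usage sketch:

```python
from google.cloud import bigquery

job_config = bigquery.QueryJobConfig()
assert job_config.job_timeout_ms is None  # default

job_config.job_timeout_ms = 5000  # reads back as "5000"
# job_config.job_timeout_ms = "soon"  # would raise ValueError

client = bigquery.Client()
client.query("SELECT 1", job_config=job_config).result()
```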
diff --git a/tests/unit/job/test_query.py b/tests/unit/job/test_query.py
index 26f1f2a73..776234b5b 100644
--- a/tests/unit/job/test_query.py
+++ b/tests/unit/job/test_query.py
@@ -25,6 +25,7 @@
import requests
from google.cloud.bigquery.client import _LIST_ROWS_FROM_QUERY_RESULTS_FIELDS
+import google.cloud.bigquery._job_helpers
import google.cloud.bigquery.query
from google.cloud.bigquery.table import _EmptyRowIterator
@@ -952,6 +953,7 @@ def test_result(self):
},
"schema": {"fields": [{"name": "col1", "type": "STRING"}]},
"totalRows": "2",
+ "queryId": "abc-def",
}
job_resource = self._make_resource(started=True, location="EU")
job_resource_done = self._make_resource(started=True, ended=True, location="EU")
@@ -980,6 +982,10 @@ def test_result(self):
rows = list(result)
self.assertEqual(len(rows), 1)
self.assertEqual(rows[0].col1, "abc")
+ self.assertEqual(result.job_id, self.JOB_ID)
+ self.assertEqual(result.location, "EU")
+ self.assertEqual(result.project, self.PROJECT)
+ self.assertEqual(result.query_id, "abc-def")
# Test that the total_rows property has changed during iteration, based
# on the response from tabledata.list.
self.assertEqual(result.total_rows, 1)
@@ -1023,6 +1029,12 @@ def test_result_dry_run(self):
calls = conn.api_request.mock_calls
self.assertIsInstance(result, _EmptyRowIterator)
self.assertEqual(calls, [])
+ self.assertEqual(result.location, "EU")
+ self.assertEqual(result.project, self.PROJECT)
+ # Intentionally omit job_id and query_id since this doesn't
+ # actually correspond to a finished query job.
+ self.assertIsNone(result.job_id)
+ self.assertIsNone(result.query_id)
def test_result_with_done_job_calls_get_query_results(self):
query_resource_done = {
@@ -1070,6 +1082,114 @@ def test_result_with_done_job_calls_get_query_results(self):
timeout=None,
)
conn.api_request.assert_has_calls([query_results_call, query_results_page_call])
+ assert conn.api_request.call_count == 2
+
+ def test_result_with_done_jobs_query_response_doesnt_call_get_query_results(self):
+ """With a done result from jobs.query, we don't need to call
+ jobs.getQueryResults to wait for the query to finish.
+
+ jobs.get is still called because there is an assumption that after
+ QueryJob.result(), all job metadata is available locally.
+ """
+ job_resource = self._make_resource(started=True, ended=True, location="EU")
+ conn = make_connection(job_resource)
+ client = _make_client(self.PROJECT, connection=conn)
+ query_resource_done = {
+ "jobComplete": True,
+ "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID},
+ "schema": {"fields": [{"name": "col1", "type": "STRING"}]},
+ "rows": [{"f": [{"v": "abc"}]}],
+ "totalRows": "1",
+ }
+ job = google.cloud.bigquery._job_helpers._to_query_job(
+ client,
+ "SELECT 'abc' AS col1",
+ request_config=None,
+ query_response=query_resource_done,
+ )
+ assert job.state == "DONE"
+
+ result = job.result()
+
+ rows = list(result)
+ self.assertEqual(len(rows), 1)
+ self.assertEqual(rows[0].col1, "abc")
+ job_path = f"/projects/{self.PROJECT}/jobs/{self.JOB_ID}"
+ conn.api_request.assert_called_once_with(
+ method="GET",
+ path=job_path,
+ query_params={},
+ timeout=None,
+ )
+
+ def test_result_with_done_jobs_query_response_and_page_size_invalidates_cache(self):
+ """We don't call jobs.query with a page size, so if the user explicitly
+ requests a certain size, invalidate the cache.
+ """
+ # Arrange
+ job_resource = self._make_resource(
+ started=True, ended=True, location="asia-northeast1"
+ )
+ query_resource_done = {
+ "jobComplete": True,
+ "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID},
+ "schema": {"fields": [{"name": "col1", "type": "STRING"}]},
+ "rows": [{"f": [{"v": "abc"}]}],
+ "pageToken": "initial-page-token-shouldnt-be-used",
+ "totalRows": "4",
+ }
+ query_page_resource = {
+ "totalRows": 4,
+ "pageToken": "some-page-token",
+ "rows": [
+ {"f": [{"v": "row1"}]},
+ {"f": [{"v": "row2"}]},
+ {"f": [{"v": "row3"}]},
+ ],
+ }
+ query_page_resource_2 = {"totalRows": 4, "rows": [{"f": [{"v": "row4"}]}]}
+ conn = make_connection(job_resource, query_page_resource, query_page_resource_2)
+ client = _make_client(self.PROJECT, connection=conn)
+ job = google.cloud.bigquery._job_helpers._to_query_job(
+ client,
+ "SELECT col1 FROM table",
+ request_config=None,
+ query_response=query_resource_done,
+ )
+ assert job.state == "DONE"
+
+ # Act
+ result = job.result(page_size=3)
+
+ # Assert
+ actual_rows = list(result)
+ self.assertEqual(len(actual_rows), 4)
+
+ query_results_path = f"/projects/{self.PROJECT}/queries/{self.JOB_ID}"
+ query_page_1_call = mock.call(
+ method="GET",
+ path=query_results_path,
+ query_params={
+ "maxResults": 3,
+ "fields": _LIST_ROWS_FROM_QUERY_RESULTS_FIELDS,
+ "location": "asia-northeast1",
+ "formatOptions.useInt64Timestamp": True,
+ },
+ timeout=None,
+ )
+ query_page_2_call = mock.call(
+ method="GET",
+ path=query_results_path,
+ query_params={
+ "pageToken": "some-page-token",
+ "maxResults": 3,
+ "fields": _LIST_ROWS_FROM_QUERY_RESULTS_FIELDS,
+ "location": "asia-northeast1",
+ "formatOptions.useInt64Timestamp": True,
+ },
+ timeout=None,
+ )
+ conn.api_request.assert_has_calls([query_page_1_call, query_page_2_call])
def test_result_with_max_results(self):
from google.cloud.bigquery.table import RowIterator
@@ -1180,16 +1300,21 @@ def test_result_w_empty_schema(self):
"jobComplete": True,
"jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID},
"schema": {"fields": []},
+ "queryId": "xyz-abc",
}
connection = make_connection(query_resource, query_resource)
client = _make_client(self.PROJECT, connection=connection)
- resource = self._make_resource(ended=True)
+ resource = self._make_resource(ended=True, location="asia-northeast1")
job = self._get_target_class().from_api_repr(resource, client)
result = job.result()
self.assertIsInstance(result, _EmptyRowIterator)
self.assertEqual(list(result), [])
+ self.assertEqual(result.project, self.PROJECT)
+ self.assertEqual(result.job_id, self.JOB_ID)
+ self.assertEqual(result.location, "asia-northeast1")
+ self.assertEqual(result.query_id, "xyz-abc")
def test_result_invokes_begins(self):
begun_resource = self._make_resource()
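
Several of the assertions above depend on the row iterator returned by `QueryJob.result()` now carrying job metadata, including the new `query_id` (which stays None for dry runs, where no real job backs the result). A sketch of reading those attributes:

```python
from google.cloud import bigquery

client = bigquery.Client()
rows = client.query("SELECT 1 AS x").result()

print(rows.project)   # project the query ran under
print(rows.job_id)    # backing job, if one was created
print(rows.location)  # e.g. "EU" or "asia-northeast1"
print(rows.query_id)  # server-assigned query ID; may be None
```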
diff --git a/tests/unit/job/test_query_pandas.py b/tests/unit/job/test_query_pandas.py
index f4c7eb06e..6189830ff 100644
--- a/tests/unit/job/test_query_pandas.py
+++ b/tests/unit/job/test_query_pandas.py
@@ -17,7 +17,6 @@
import json
import mock
-import pkg_resources
import pytest
@@ -45,14 +44,19 @@
except (ImportError, AttributeError): # pragma: NO COVER
tqdm = None
+try:
+ import importlib.metadata as metadata
+except ImportError:
+ import importlib_metadata as metadata
+
from ..helpers import make_connection
from .helpers import _make_client
from .helpers import _make_job_resource
if pandas is not None:
- PANDAS_INSTALLED_VERSION = pkg_resources.get_distribution("pandas").parsed_version
+ PANDAS_INSTALLED_VERSION = metadata.version("pandas")
else:
- PANDAS_INSTALLED_VERSION = pkg_resources.parse_version("0.0.0")
+ PANDAS_INSTALLED_VERSION = "0.0.0"
pandas = pytest.importorskip("pandas")
@@ -560,7 +564,7 @@ def test_to_dataframe_bqstorage(table_read_options_kwarg):
[name_array, age_array], schema=arrow_schema
)
connection = make_connection(query_resource)
- client = _make_client(connection=connection)
+ client = _make_client(connection=connection, project="bqstorage-billing-project")
job = target_class.from_api_repr(resource, client)
session = bigquery_storage.types.ReadSession()
session.arrow_schema.serialized_schema = arrow_schema.serialize().to_pybytes()
@@ -597,7 +601,9 @@ def test_to_dataframe_bqstorage(table_read_options_kwarg):
**table_read_options_kwarg,
)
bqstorage_client.create_read_session.assert_called_once_with(
- parent=f"projects/{client.project}",
+ # The billing project can differ from the data project. Make sure we
+ # are charging to the billing project, not the data project.
+ parent="projects/bqstorage-billing-project",
read_session=expected_session,
max_stream_count=0, # Use default number of streams for best performance.
)
@@ -618,7 +624,7 @@ def test_to_dataframe_bqstorage_no_pyarrow_compression():
"schema": {"fields": [{"name": "name", "type": "STRING", "mode": "NULLABLE"}]},
}
connection = make_connection(query_resource)
- client = _make_client(connection=connection)
+ client = _make_client(connection=connection, project="bqstorage-billing-project")
job = target_class.from_api_repr(resource, client)
bqstorage_client = mock.create_autospec(bigquery_storage.BigQueryReadClient)
session = bigquery_storage.types.ReadSession()
@@ -646,15 +652,15 @@ def test_to_dataframe_bqstorage_no_pyarrow_compression():
data_format=bigquery_storage.DataFormat.ARROW,
)
bqstorage_client.create_read_session.assert_called_once_with(
- parent=f"projects/{client.project}",
+ # The billing project can differ from the data project. Make sure we
+ # are charging to the billing project, not the data project.
+ parent="projects/bqstorage-billing-project",
read_session=expected_session,
max_stream_count=0,
)
-@pytest.mark.skipif(
- PANDAS_INSTALLED_VERSION >= pkg_resources.parse_version("2.0.0"), reason=""
-)
+@pytest.mark.skipif(PANDAS_INSTALLED_VERSION[0:2] not in ["0.", "1."], reason="")
@pytest.mark.skipif(pyarrow is None, reason="Requires `pyarrow`")
def test_to_dataframe_column_dtypes():
from google.cloud.bigquery.job import QueryJob as target_class
diff --git a/tests/unit/routine/test_routine.py b/tests/unit/routine/test_routine.py
index 87767200c..acd3bc40e 100644
--- a/tests/unit/routine/test_routine.py
+++ b/tests/unit/routine/test_routine.py
@@ -154,6 +154,7 @@ def test_from_api_repr(target_class):
"foo": "bar",
},
},
+ "dataGovernanceType": "DATA_MASKING",
}
actual_routine = target_class.from_api_repr(resource)
@@ -192,6 +193,7 @@ def test_from_api_repr(target_class):
assert actual_routine.remote_function_options.connection == "connection_string"
assert actual_routine.remote_function_options.max_batching_rows == 50
assert actual_routine.remote_function_options.user_defined_context == {"foo": "bar"}
+ assert actual_routine.data_governance_type == "DATA_MASKING"
def test_from_api_repr_tvf_function(target_class):
@@ -294,6 +296,7 @@ def test_from_api_repr_w_minimal_resource(target_class):
assert actual_routine.description is None
assert actual_routine.determinism_level is None
assert actual_routine.remote_function_options is None
+ assert actual_routine.data_governance_type is None
def test_from_api_repr_w_unknown_fields(target_class):
@@ -428,6 +431,20 @@ def test_from_api_repr_w_unknown_fields(target_class):
"determinismLevel": bigquery.DeterminismLevel.DETERMINISM_LEVEL_UNSPECIFIED
},
),
+ (
+ {
+ "arguments": [{"name": "x", "dataType": {"typeKind": "INT64"}}],
+ "definitionBody": "x * 3",
+ "language": "SQL",
+ "returnType": {"typeKind": "INT64"},
+ "routineType": "SCALAR_FUNCTION",
+ "description": "A routine description.",
+ "determinismLevel": bigquery.DeterminismLevel.DETERMINISM_LEVEL_UNSPECIFIED,
+ "dataGovernanceType": "DATA_MASKING",
+ },
+ ["data_governance_type"],
+ {"dataGovernanceType": "DATA_MASKING"},
+ ),
(
{},
[
@@ -554,6 +571,36 @@ def test_set_remote_function_options_w_none(object_under_test):
assert object_under_test._properties["remoteFunctionOptions"] is None
+def test_set_data_governance_type_w_none(object_under_test):
+ object_under_test.data_governance_type = None
+ assert object_under_test.data_governance_type is None
+ assert object_under_test._properties["dataGovernanceType"] is None
+
+
+def test_set_data_governance_type_valid(object_under_test):
+ object_under_test.data_governance_type = "DATA_MASKING"
+ assert object_under_test.data_governance_type == "DATA_MASKING"
+ assert object_under_test._properties["dataGovernanceType"] == "DATA_MASKING"
+
+
+def test_set_data_governance_type_wrong_type(object_under_test):
+ with pytest.raises(ValueError) as exp:
+ object_under_test.data_governance_type = 1
+ assert "invalid data_governance_type" in str(exp)
+ assert object_under_test.data_governance_type is None
+ assert object_under_test._properties.get("dataGovernanceType") is None
+
+
+def test_set_data_governance_type_wrong_str(object_under_test):
+ """Client does not verify the content of data_governance_type string to be
+ compatible with future upgrades. If the value is not supported, BigQuery
+ itself will report an error.
+ """
+ object_under_test.data_governance_type = "RANDOM_STRING"
+ assert object_under_test.data_governance_type == "RANDOM_STRING"
+ assert object_under_test._properties["dataGovernanceType"] == "RANDOM_STRING"
+
+
def test_repr(target_class):
model = target_class("my-proj.my_dset.my_routine")
actual_routine = repr(model)
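
The routine tests add a `data_governance_type` field; as `test_set_data_governance_type_wrong_str` notes, the client only validates that the value is a string (or None) and leaves checking the actual value to BigQuery. Mirroring the system test earlier in this diff, a creation sketch with a placeholder routine ID:

```python
from google.cloud import bigquery

client = bigquery.Client()

routine = bigquery.Routine(
    "my-project.my_dataset.mask_int",  # placeholder routine ID
    type_="SCALAR_FUNCTION",
    language="SQL",
    body="x",
    arguments=[
        bigquery.RoutineArgument(
            name="x",
            data_type=bigquery.StandardSqlDataType(
                type_kind=bigquery.StandardSqlTypeNames.INT64
            ),
        )
    ],
    return_type=bigquery.StandardSqlDataType(
        type_kind=bigquery.StandardSqlTypeNames.INT64
    ),
    data_governance_type="DATA_MASKING",
)
routine = client.create_routine(routine)
print(routine.data_governance_type)  # "DATA_MASKING"
```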
diff --git a/tests/unit/test__job_helpers.py b/tests/unit/test__job_helpers.py
index 012352f4e..f2fe32d94 100644
--- a/tests/unit/test__job_helpers.py
+++ b/tests/unit/test__job_helpers.py
@@ -12,15 +12,18 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import functools
from typing import Any, Dict, Optional
from unittest import mock
+import freezegun
+import google.api_core.exceptions
from google.api_core import retry as retries
import pytest
from google.cloud.bigquery.client import Client
from google.cloud.bigquery import _job_helpers
-from google.cloud.bigquery.job.query import QueryJob, QueryJobConfig
+from google.cloud.bigquery.job import query as job_query
from google.cloud.bigquery.query import ConnectionProperty, ScalarQueryParameter
@@ -55,9 +58,9 @@ def make_query_response(
("job_config", "expected"),
(
(None, make_query_request()),
- (QueryJobConfig(), make_query_request()),
+ (job_query.QueryJobConfig(), make_query_request()),
(
- QueryJobConfig(default_dataset="my-project.my_dataset"),
+ job_query.QueryJobConfig(default_dataset="my-project.my_dataset"),
make_query_request(
{
"defaultDataset": {
@@ -67,17 +70,17 @@ def make_query_response(
}
),
),
- (QueryJobConfig(dry_run=True), make_query_request({"dryRun": True})),
+ (job_query.QueryJobConfig(dry_run=True), make_query_request({"dryRun": True})),
(
- QueryJobConfig(use_query_cache=False),
+ job_query.QueryJobConfig(use_query_cache=False),
make_query_request({"useQueryCache": False}),
),
(
- QueryJobConfig(use_legacy_sql=True),
+ job_query.QueryJobConfig(use_legacy_sql=True),
make_query_request({"useLegacySql": True}),
),
(
- QueryJobConfig(
+ job_query.QueryJobConfig(
query_parameters=[
ScalarQueryParameter("named_param1", "STRING", "param-value"),
ScalarQueryParameter("named_param2", "INT64", 123),
@@ -102,7 +105,7 @@ def make_query_response(
),
),
(
- QueryJobConfig(
+ job_query.QueryJobConfig(
query_parameters=[
ScalarQueryParameter(None, "STRING", "param-value"),
ScalarQueryParameter(None, "INT64", 123),
@@ -125,7 +128,7 @@ def make_query_response(
),
),
(
- QueryJobConfig(
+ job_query.QueryJobConfig(
connection_properties=[
ConnectionProperty(key="time_zone", value="America/Chicago"),
ConnectionProperty(key="session_id", value="abcd-efgh-ijkl-mnop"),
@@ -141,17 +144,18 @@ def make_query_response(
),
),
(
- QueryJobConfig(labels={"abc": "def"}),
+ job_query.QueryJobConfig(labels={"abc": "def"}),
make_query_request({"labels": {"abc": "def"}}),
),
(
- QueryJobConfig(maximum_bytes_billed=987654),
+ job_query.QueryJobConfig(maximum_bytes_billed=987654),
make_query_request({"maximumBytesBilled": "987654"}),
),
),
)
def test__to_query_request(job_config, expected):
- result = _job_helpers._to_query_request(job_config)
+ result = _job_helpers._to_query_request(job_config, query="SELECT 1")
+ expected["query"] = "SELECT 1"
assert result == expected
@@ -160,7 +164,9 @@ def test__to_query_job_defaults():
response = make_query_response(
job_id="test-job", project_id="some-project", location="asia-northeast1"
)
- job: QueryJob = _job_helpers._to_query_job(mock_client, "query-str", None, response)
+ job: job_query.QueryJob = _job_helpers._to_query_job(
+ mock_client, "query-str", None, response
+ )
assert job.query == "query-str"
assert job._client is mock_client
assert job.job_id == "test-job"
@@ -175,9 +181,9 @@ def test__to_query_job_dry_run():
response = make_query_response(
job_id="test-job", project_id="some-project", location="asia-northeast1"
)
- job_config: QueryJobConfig = QueryJobConfig()
+ job_config: job_query.QueryJobConfig = job_query.QueryJobConfig()
job_config.dry_run = True
- job: QueryJob = _job_helpers._to_query_job(
+ job: job_query.QueryJob = _job_helpers._to_query_job(
mock_client, "query-str", job_config, response
)
assert job.dry_run is True
@@ -193,7 +199,9 @@ def test__to_query_job_dry_run():
def test__to_query_job_sets_state(completed, expected_state):
mock_client = mock.create_autospec(Client)
response = make_query_response(completed=completed)
- job: QueryJob = _job_helpers._to_query_job(mock_client, "query-str", None, response)
+ job: job_query.QueryJob = _job_helpers._to_query_job(
+ mock_client, "query-str", None, response
+ )
assert job.state == expected_state
@@ -206,7 +214,9 @@ def test__to_query_job_sets_errors():
{"message": "something else went wrong"},
]
)
- job: QueryJob = _job_helpers._to_query_job(mock_client, "query-str", None, response)
+ job: job_query.QueryJob = _job_helpers._to_query_job(
+ mock_client, "query-str", None, response
+ )
assert len(job.errors) == 2
# If we got back a response instead of an HTTP error status code, most
# likely the job didn't completely fail.
@@ -313,6 +323,717 @@ def test_query_jobs_query_sets_timeout(timeout, expected_timeout):
assert request["timeoutMs"] == expected_timeout
+def test_query_and_wait_uses_jobs_insert():
+ """With unsupported features, call jobs.insert instead of jobs.query."""
+ client = mock.create_autospec(Client)
+ client._call_api.return_value = {
+ "jobReference": {
+ "projectId": "response-project",
+ "jobId": "abc",
+ "location": "response-location",
+ },
+ "query": {
+ "query": "SELECT 1",
+ },
+ # Make sure the job has "started"
+ "status": {"state": "DONE"},
+ "jobComplete": True,
+ }
+ job_config = job_query.QueryJobConfig(
+ destination="dest-project.dest_dset.dest_table",
+ )
+ _job_helpers.query_and_wait(
+ client,
+ query="SELECT 1",
+ location="request-location",
+ project="request-project",
+ job_config=job_config,
+ retry=None,
+ job_retry=None,
+ page_size=None,
+ max_results=None,
+ )
+
+ # We should call jobs.insert since jobs.query doesn't support destination.
+ request_path = "/projects/request-project/jobs"
+ client._call_api.assert_any_call(
+ None, # retry,
+ span_name="BigQuery.job.begin",
+ span_attributes={"path": request_path},
+ job_ref=mock.ANY,
+ method="POST",
+ path=request_path,
+ data={
+ "jobReference": {
+ "jobId": mock.ANY,
+ "projectId": "request-project",
+ "location": "request-location",
+ },
+ "configuration": {
+ "query": {
+ "destinationTable": {
+ "projectId": "dest-project",
+ "datasetId": "dest_dset",
+ "tableId": "dest_table",
+ },
+ "useLegacySql": False,
+ "query": "SELECT 1",
+ }
+ },
+ },
+ timeout=None,
+ )
+
+
+def test_query_and_wait_retries_job():
+ freezegun.freeze_time(auto_tick_seconds=100)
+ client = mock.create_autospec(Client)
+ client._call_api.__name__ = "_call_api"
+ client._call_api.__qualname__ = "Client._call_api"
+ client._call_api.__annotations__ = {}
+ client._call_api.__type_params__ = ()
+ client._call_api.side_effect = (
+ google.api_core.exceptions.BadGateway("retry me"),
+ google.api_core.exceptions.InternalServerError("job_retry me"),
+ google.api_core.exceptions.BadGateway("retry me"),
+ {
+ "jobReference": {
+ "projectId": "response-project",
+ "jobId": "abc",
+ "location": "response-location",
+ },
+ "jobComplete": True,
+ "schema": {
+ "fields": [
+ {"name": "full_name", "type": "STRING", "mode": "REQUIRED"},
+ {"name": "age", "type": "INT64", "mode": "NULLABLE"},
+ ],
+ },
+ "rows": [
+ {"f": [{"v": "Whillma Phlyntstone"}, {"v": "27"}]},
+ {"f": [{"v": "Bhetty Rhubble"}, {"v": "28"}]},
+ {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]},
+ {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]},
+ ],
+ },
+ )
+ rows = _job_helpers.query_and_wait(
+ client,
+ query="SELECT 1",
+ location="request-location",
+ project="request-project",
+ job_config=None,
+ page_size=None,
+ max_results=None,
+ retry=retries.Retry(
+ lambda exc: isinstance(exc, google.api_core.exceptions.BadGateway),
+ multiplier=1.0,
+ ).with_deadline(
+ 200.0
+ ), # Since auto_tick_seconds is 100, we should get at least 1 retry.
+ job_retry=retries.Retry(
+ lambda exc: isinstance(exc, google.api_core.exceptions.InternalServerError),
+ multiplier=1.0,
+ ).with_deadline(600.0),
+ )
+ assert len(list(rows)) == 4
+
+ # For this code path, where the query has finished immediately, we should
+ # only be calling the jobs.query API and no other request path.
+ request_path = "/projects/request-project/queries"
+ for call in client._call_api.call_args_list:
+ _, kwargs = call
+ assert kwargs["method"] == "POST"
+ assert kwargs["path"] == request_path
+
+
+@freezegun.freeze_time(auto_tick_seconds=100)
+def test_query_and_wait_retries_job_times_out():
+ client = mock.create_autospec(Client)
+ client._call_api.__name__ = "_call_api"
+ client._call_api.__qualname__ = "Client._call_api"
+ client._call_api.__annotations__ = {}
+ client._call_api.__type_params__ = ()
+ client._call_api.side_effect = (
+ google.api_core.exceptions.BadGateway("retry me"),
+ google.api_core.exceptions.InternalServerError("job_retry me"),
+ google.api_core.exceptions.BadGateway("retry me"),
+ google.api_core.exceptions.InternalServerError("job_retry me"),
+ )
+
+ with pytest.raises(google.api_core.exceptions.RetryError) as exc_info:
+ _job_helpers.query_and_wait(
+ client,
+ query="SELECT 1",
+ location="request-location",
+ project="request-project",
+ job_config=None,
+ page_size=None,
+ max_results=None,
+ retry=retries.Retry(
+ lambda exc: isinstance(exc, google.api_core.exceptions.BadGateway),
+ multiplier=1.0,
+ ).with_deadline(
+ 200.0
+ ), # Since auto_tick_seconds is 100, we should get at least 1 retry.
+ job_retry=retries.Retry(
+ lambda exc: isinstance(
+ exc, google.api_core.exceptions.InternalServerError
+ ),
+ multiplier=1.0,
+ ).with_deadline(400.0),
+ )
+
+ assert isinstance(
+ exc_info.value.cause, google.api_core.exceptions.InternalServerError
+ )
+
+
+def test_query_and_wait_sets_job_creation_mode(monkeypatch: pytest.MonkeyPatch):
+ monkeypatch.setenv(
+ "QUERY_PREVIEW_ENABLED",
+        # The comparison should be case-insensitive.
+ "TrUe",
+ )
+ client = mock.create_autospec(Client)
+ client._call_api.return_value = {
+ "jobReference": {
+ "projectId": "response-project",
+ "jobId": "abc",
+ "location": "response-location",
+ },
+ "jobComplete": True,
+ }
+ _job_helpers.query_and_wait(
+ client,
+ query="SELECT 1",
+ location="request-location",
+ project="request-project",
+ job_config=None,
+ retry=None,
+ job_retry=None,
+ page_size=None,
+ max_results=None,
+ )
+
+ # We should only call jobs.query once, no additional row requests needed.
+ request_path = "/projects/request-project/queries"
+ client._call_api.assert_called_once_with(
+ None, # retry
+ span_name="BigQuery.query",
+ span_attributes={"path": request_path},
+ method="POST",
+ path=request_path,
+ data={
+ "query": "SELECT 1",
+ "location": "request-location",
+ "useLegacySql": False,
+ "formatOptions": {
+ "useInt64Timestamp": True,
+ },
+ "requestId": mock.ANY,
+ "jobCreationMode": "JOB_CREATION_OPTIONAL",
+ },
+ timeout=None,
+ )
+
+
+def test_query_and_wait_sets_location():
+ client = mock.create_autospec(Client)
+ client._call_api.return_value = {
+ "jobReference": {
+ "projectId": "response-project",
+ "jobId": "abc",
+ "location": "response-location",
+ },
+ "jobComplete": True,
+ }
+ rows = _job_helpers.query_and_wait(
+ client,
+ query="SELECT 1",
+ location="request-location",
+ project="request-project",
+ job_config=None,
+ retry=None,
+ job_retry=None,
+ page_size=None,
+ max_results=None,
+ )
+ assert rows.location == "response-location"
+
+ # We should only call jobs.query once, no additional row requests needed.
+ request_path = "/projects/request-project/queries"
+ client._call_api.assert_called_once_with(
+ None, # retry
+ span_name="BigQuery.query",
+ span_attributes={"path": request_path},
+ method="POST",
+ path=request_path,
+ data={
+ "query": "SELECT 1",
+ "location": "request-location",
+ "useLegacySql": False,
+ "formatOptions": {
+ "useInt64Timestamp": True,
+ },
+ "requestId": mock.ANY,
+ },
+ timeout=None,
+ )
+
+
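+# The maxResults value sent in the jobs.query request is expected to be the
+# smaller of max_results and page_size when both are provided.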
+@pytest.mark.parametrize(
+ ("max_results", "page_size", "expected"),
+ [
+ (10, None, 10),
+ (None, 11, 11),
+ (12, 100, 12),
+ (100, 13, 13),
+ ],
+)
+def test_query_and_wait_sets_max_results(max_results, page_size, expected):
+ client = mock.create_autospec(Client)
+ client._call_api.return_value = {
+ "jobReference": {
+ "projectId": "response-project",
+ "jobId": "abc",
+ "location": "response-location",
+ },
+ "jobComplete": True,
+ }
+ rows = _job_helpers.query_and_wait(
+ client,
+ query="SELECT 1",
+ location="request-location",
+ project="request-project",
+ job_config=None,
+ retry=None,
+ job_retry=None,
+ page_size=page_size,
+ max_results=max_results,
+ )
+ assert rows.location == "response-location"
+
+ # We should only call jobs.query once, no additional row requests needed.
+ request_path = "/projects/request-project/queries"
+ client._call_api.assert_called_once_with(
+ None, # retry
+ span_name="BigQuery.query",
+ span_attributes={"path": request_path},
+ method="POST",
+ path=request_path,
+ data={
+ "query": "SELECT 1",
+ "location": "request-location",
+ "useLegacySql": False,
+ "formatOptions": {
+ "useInt64Timestamp": True,
+ },
+ "requestId": mock.ANY,
+ "maxResults": expected,
+ },
+ timeout=None,
+ )
+
+
+def test_query_and_wait_caches_completed_query_results_one_page():
+ client = mock.create_autospec(Client)
+ client._call_api.return_value = {
+ "jobReference": {
+ "projectId": "response-project",
+ "jobId": "abc",
+ "location": "US",
+ },
+ "jobComplete": True,
+ "queryId": "xyz",
+ "schema": {
+ "fields": [
+ {"name": "full_name", "type": "STRING", "mode": "REQUIRED"},
+ {"name": "age", "type": "INT64", "mode": "NULLABLE"},
+ ],
+ },
+ "rows": [
+ {"f": [{"v": "Whillma Phlyntstone"}, {"v": "27"}]},
+ {"f": [{"v": "Bhetty Rhubble"}, {"v": "28"}]},
+ {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]},
+ {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]},
+ ],
+        # Even though totalRows > len(rows), we should use the presence of a
+ # next page token to decide if there are any more pages.
+ "totalRows": 8,
+ }
+ rows = _job_helpers.query_and_wait(
+ client,
+ query="SELECT full_name, age FROM people;",
+ job_config=None,
+ location=None,
+ project="request-project",
+ retry=None,
+ job_retry=None,
+ page_size=None,
+ max_results=None,
+ )
+ rows_list = list(rows)
+ assert rows.project == "response-project"
+ assert rows.job_id == "abc"
+ assert rows.location == "US"
+ assert rows.query_id == "xyz"
+ assert rows.total_rows == 8
+ assert len(rows_list) == 4
+
+ # We should only call jobs.query once, no additional row requests needed.
+ request_path = "/projects/request-project/queries"
+ client._call_api.assert_called_once_with(
+ None, # retry
+ span_name="BigQuery.query",
+ span_attributes={"path": request_path},
+ method="POST",
+ path=request_path,
+ data={
+ "query": "SELECT full_name, age FROM people;",
+ "useLegacySql": False,
+ "formatOptions": {
+ "useInt64Timestamp": True,
+ },
+ "requestId": mock.ANY,
+ },
+ timeout=None,
+ )
+
+
+def test_query_and_wait_caches_completed_query_results_one_page_no_rows():
+ client = mock.create_autospec(Client)
+ client._call_api.return_value = {
+ "jobReference": {
+ "projectId": "response-project",
+ "jobId": "abc",
+ "location": "US",
+ },
+ "jobComplete": True,
+ "queryId": "xyz",
+ }
+ rows = _job_helpers.query_and_wait(
+ client,
+ query="CREATE TABLE abc;",
+ project="request-project",
+ job_config=None,
+ location=None,
+ retry=None,
+ job_retry=None,
+ page_size=None,
+ max_results=None,
+ )
+ assert rows.project == "response-project"
+ assert rows.job_id == "abc"
+ assert rows.location == "US"
+ assert rows.query_id == "xyz"
+ assert list(rows) == []
+
+ # We should only call jobs.query once, no additional row requests needed.
+ request_path = "/projects/request-project/queries"
+ client._call_api.assert_called_once_with(
+ None, # retry
+ span_name="BigQuery.query",
+ span_attributes={"path": request_path},
+ method="POST",
+ path=request_path,
+ data={
+ "query": "CREATE TABLE abc;",
+ "useLegacySql": False,
+ "formatOptions": {
+ "useInt64Timestamp": True,
+ },
+ "requestId": mock.ANY,
+ },
+ timeout=None,
+ )
+
+
+def test_query_and_wait_caches_completed_query_results_more_pages():
+ client = mock.create_autospec(Client)
+ client._list_rows_from_query_results = functools.partial(
+ Client._list_rows_from_query_results, client
+ )
+ client._call_api.side_effect = (
+ {
+ "jobReference": {
+ "projectId": "response-project",
+ "jobId": "response-job-id",
+ "location": "response-location",
+ },
+ "jobComplete": True,
+ "queryId": "xyz",
+ "schema": {
+ "fields": [
+ {"name": "full_name", "type": "STRING", "mode": "REQUIRED"},
+ {"name": "age", "type": "INT64", "mode": "NULLABLE"},
+ ],
+ },
+ "rows": [
+ {"f": [{"v": "Whillma Phlyntstone"}, {"v": "27"}]},
+ {"f": [{"v": "Bhetty Rhubble"}, {"v": "28"}]},
+ {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]},
+ {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]},
+ ],
+            # Even though totalRows <= len(rows), we should use the presence of a
+ # next page token to decide if there are any more pages.
+ "totalRows": 2,
+ "pageToken": "page-2",
+ },
+ # TODO(swast): This is a case where we can avoid a call to jobs.get,
+ # but currently do so because the RowIterator might need the
+ # destination table, since results aren't fully cached.
+ {
+ "jobReference": {
+ "projectId": "response-project",
+ "jobId": "response-job-id",
+ "location": "response-location",
+ },
+ },
+ {
+ "rows": [
+ {"f": [{"v": "Pebbles Phlyntstone"}, {"v": "4"}]},
+ {"f": [{"v": "Bamm-Bamm Rhubble"}, {"v": "5"}]},
+ {"f": [{"v": "Joseph Rockhead"}, {"v": "32"}]},
+ {"f": [{"v": "Perry Masonry"}, {"v": "33"}]},
+ ],
+ "totalRows": 3,
+ "pageToken": "page-3",
+ },
+ {
+ "rows": [
+ {"f": [{"v": "Pearl Slaghoople"}, {"v": "53"}]},
+ ],
+ "totalRows": 4,
+ },
+ )
+ rows = _job_helpers.query_and_wait(
+ client,
+ query="SELECT full_name, age FROM people;",
+ project="request-project",
+ job_config=None,
+ location=None,
+ retry=None,
+ job_retry=None,
+ page_size=None,
+ max_results=None,
+ )
+ assert rows.total_rows == 2 # Match the API response.
+ rows_list = list(rows)
+ assert rows.total_rows == 4 # Match the final API response.
+ assert len(rows_list) == 9
+
+ # Start the query.
+ jobs_query_path = "/projects/request-project/queries"
+ client._call_api.assert_any_call(
+ None, # retry
+ span_name="BigQuery.query",
+ span_attributes={"path": jobs_query_path},
+ method="POST",
+ path=jobs_query_path,
+ data={
+ "query": "SELECT full_name, age FROM people;",
+ "useLegacySql": False,
+ "formatOptions": {
+ "useInt64Timestamp": True,
+ },
+ "requestId": mock.ANY,
+ },
+ timeout=None,
+ )
+
+ # TODO(swast): Fetching job metadata isn't necessary in this case.
+ jobs_get_path = "/projects/response-project/jobs/response-job-id"
+ client._call_api.assert_any_call(
+ None, # retry
+ span_name="BigQuery.job.reload",
+ span_attributes={"path": jobs_get_path},
+ job_ref=mock.ANY,
+ method="GET",
+ path=jobs_get_path,
+ query_params={"location": "response-location"},
+ timeout=None,
+ )
+
+ # Fetch the remaining two pages.
+ jobs_get_query_results_path = "/projects/response-project/queries/response-job-id"
+ client._call_api.assert_any_call(
+ None, # retry
+ timeout=None,
+ method="GET",
+ path=jobs_get_query_results_path,
+ query_params={
+ "pageToken": "page-2",
+ "fields": "jobReference,totalRows,pageToken,rows",
+ "location": "response-location",
+ "formatOptions.useInt64Timestamp": True,
+ },
+ )
+ client._call_api.assert_any_call(
+ None, # retry
+ timeout=None,
+ method="GET",
+ path=jobs_get_query_results_path,
+ query_params={
+ "pageToken": "page-3",
+ "fields": "jobReference,totalRows,pageToken,rows",
+ "location": "response-location",
+ "formatOptions.useInt64Timestamp": True,
+ },
+ )
+
+
+def test_query_and_wait_incomplete_query():
+ client = mock.create_autospec(Client)
+ client._get_query_results = functools.partial(Client._get_query_results, client)
+ client._list_rows_from_query_results = functools.partial(
+ Client._list_rows_from_query_results, client
+ )
+ client._call_api.side_effect = (
+ {
+ "jobReference": {
+ "projectId": "response-project",
+ "jobId": "response-job-id",
+ "location": "response-location",
+ },
+ "jobComplete": False,
+ },
+ {
+ "jobReference": {
+ "projectId": "response-project",
+ "jobId": "response-job-id",
+ "location": "response-location",
+ },
+ "jobComplete": True,
+ "totalRows": 2,
+ "queryId": "xyz",
+ "schema": {
+ "fields": [
+ {"name": "full_name", "type": "STRING", "mode": "REQUIRED"},
+ {"name": "age", "type": "INT64", "mode": "NULLABLE"},
+ ],
+ },
+ },
+ {
+ "jobReference": {
+ "projectId": "response-project",
+ "jobId": "response-job-id",
+ "location": "response-location",
+ },
+ },
+ {
+ "rows": [
+ {"f": [{"v": "Whillma Phlyntstone"}, {"v": "27"}]},
+ {"f": [{"v": "Bhetty Rhubble"}, {"v": "28"}]},
+ {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]},
+ {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]},
+ ],
+            # Even though totalRows <= len(rows), we should use the presence of a
+ # next page token to decide if there are any more pages.
+ "totalRows": 2,
+ "pageToken": "page-2",
+ },
+ {
+ "rows": [
+ {"f": [{"v": "Pearl Slaghoople"}, {"v": "53"}]},
+ ],
+ },
+ )
+ rows = _job_helpers.query_and_wait(
+ client,
+ query="SELECT full_name, age FROM people;",
+ project="request-project",
+ job_config=None,
+ location=None,
+ retry=None,
+ job_retry=None,
+ page_size=None,
+ max_results=None,
+ )
+ rows_list = list(rows)
+ assert rows.total_rows == 2 # Match the API response.
+ assert len(rows_list) == 5
+
+ # Start the query.
+ jobs_query_path = "/projects/request-project/queries"
+ client._call_api.assert_any_call(
+ None, # retry
+ span_name="BigQuery.query",
+ span_attributes={"path": jobs_query_path},
+ method="POST",
+ path=jobs_query_path,
+ data={
+ "query": "SELECT full_name, age FROM people;",
+ "useLegacySql": False,
+ "formatOptions": {
+ "useInt64Timestamp": True,
+ },
+ "requestId": mock.ANY,
+ },
+ timeout=None,
+ )
+
+ # Wait for the query to finish.
+ jobs_get_query_results_path = "/projects/response-project/queries/response-job-id"
+ client._call_api.assert_any_call(
+ None, # retry
+ span_name="BigQuery.getQueryResults",
+ span_attributes={"path": jobs_get_query_results_path},
+ method="GET",
+ path=jobs_get_query_results_path,
+ query_params={
+ # job_query.QueryJob uses getQueryResults to wait for the query to finish.
+ # It avoids fetching the results because:
+ # (1) For large rows this can take a long time, much longer than
+ # our progress bar update frequency.
+ # See: https://github.com/googleapis/python-bigquery/issues/403
+            # (2) Caching the first page of results causes an unexpected
+            #     increase in memory usage.
+            #     See: https://github.com/googleapis/python-bigquery/issues/394
+ "maxResults": 0,
+ "location": "response-location",
+ },
+ timeout=None,
+ )
+
+ # Fetch the job metadata in case the RowIterator needs the destination table.
+ jobs_get_path = "/projects/response-project/jobs/response-job-id"
+ client._call_api.assert_any_call(
+ None, # retry
+ span_name="BigQuery.job.reload",
+ span_attributes={"path": jobs_get_path},
+ job_ref=mock.ANY,
+ method="GET",
+ path=jobs_get_path,
+ query_params={"location": "response-location"},
+ timeout=None,
+ )
+
+ # Fetch the remaining two pages.
+ client._call_api.assert_any_call(
+ None, # retry
+ timeout=None,
+ method="GET",
+ path=jobs_get_query_results_path,
+ query_params={
+ "fields": "jobReference,totalRows,pageToken,rows",
+ "location": "response-location",
+ "formatOptions.useInt64Timestamp": True,
+ },
+ )
+ client._call_api.assert_any_call(
+ None, # retry
+ timeout=None,
+ method="GET",
+ path=jobs_get_query_results_path,
+ query_params={
+ "pageToken": "page-2",
+ "fields": "jobReference,totalRows,pageToken,rows",
+ "location": "response-location",
+ "formatOptions.useInt64Timestamp": True,
+ },
+ )
+
+
def test_make_job_id_wo_suffix():
job_id = _job_helpers.make_job_id("job_id")
assert job_id == "job_id"
@@ -335,3 +1056,120 @@ def test_make_job_id_random():
def test_make_job_id_w_job_id_overrides_prefix():
job_id = _job_helpers.make_job_id("job_id", prefix="unused_prefix")
assert job_id == "job_id"
+
+
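+# Options that the jobs.query request can express directly (cache usage,
+# billing limits) should be supported; options that require a full job
+# configuration, such as clustering, a destination table, or destination
+# encryption, should not be.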
+@pytest.mark.parametrize(
+ ("job_config", "expected"),
+ (
+ pytest.param(None, True),
+ pytest.param(job_query.QueryJobConfig(), True, id="default"),
+ pytest.param(
+ job_query.QueryJobConfig(use_query_cache=False), True, id="use_query_cache"
+ ),
+ pytest.param(
+ job_query.QueryJobConfig(maximum_bytes_billed=10_000_000),
+ True,
+ id="maximum_bytes_billed",
+ ),
+ pytest.param(
+ job_query.QueryJobConfig(clustering_fields=["a", "b", "c"]),
+ False,
+ id="clustering_fields",
+ ),
+ pytest.param(
+ job_query.QueryJobConfig(destination="p.d.t"), False, id="destination"
+ ),
+ pytest.param(
+ job_query.QueryJobConfig(
+ destination_encryption_configuration=job_query.EncryptionConfiguration(
+ "key"
+ )
+ ),
+ False,
+ id="destination_encryption_configuration",
+ ),
+ ),
+)
+def test_supported_by_jobs_query(
+ job_config: Optional[job_query.QueryJobConfig], expected: bool
+):
+ assert _job_helpers._supported_by_jobs_query(job_config) == expected
+
+
+def test_wait_or_cancel_no_exception():
+ job = mock.create_autospec(job_query.QueryJob, instance=True)
+ expected_rows = object()
+ job.result.return_value = expected_rows
+ retry = retries.Retry()
+
+ rows = _job_helpers._wait_or_cancel(
+ job,
+ api_timeout=123,
+ wait_timeout=456,
+ retry=retry,
+ page_size=789,
+ max_results=101112,
+ )
+
+ job.result.assert_called_once_with(
+ timeout=456,
+ retry=retry,
+ page_size=789,
+ max_results=101112,
+ )
+ assert rows is expected_rows
+
+
+def test_wait_or_cancel_exception_cancels_job():
+ job = mock.create_autospec(job_query.QueryJob, instance=True)
+ job.result.side_effect = google.api_core.exceptions.BadGateway("test error")
+ retry = retries.Retry()
+
+ with pytest.raises(google.api_core.exceptions.BadGateway):
+ _job_helpers._wait_or_cancel(
+ job,
+ api_timeout=123,
+ wait_timeout=456,
+ retry=retry,
+ page_size=789,
+ max_results=101112,
+ )
+
+ job.result.assert_called_once_with(
+ timeout=456,
+ retry=retry,
+ page_size=789,
+ max_results=101112,
+ )
+ job.cancel.assert_called_once_with(
+ timeout=123,
+ retry=retry,
+ )
+
+
+def test_wait_or_cancel_exception_raises_original_exception():
+ job = mock.create_autospec(job_query.QueryJob, instance=True)
+ job.result.side_effect = google.api_core.exceptions.BadGateway("test error")
+ job.cancel.side_effect = google.api_core.exceptions.NotFound("don't raise me")
+ retry = retries.Retry()
+
+ with pytest.raises(google.api_core.exceptions.BadGateway):
+ _job_helpers._wait_or_cancel(
+ job,
+ api_timeout=123,
+ wait_timeout=456,
+ retry=retry,
+ page_size=789,
+ max_results=101112,
+ )
+
+ job.result.assert_called_once_with(
+ timeout=456,
+ retry=retry,
+ page_size=789,
+ max_results=101112,
+ )
+ job.cancel.assert_called_once_with(
+ timeout=123,
+ retry=retry,
+ )
diff --git a/tests/unit/test__pandas_helpers.py b/tests/unit/test__pandas_helpers.py
index 212a6f1dd..ad40a6da6 100644
--- a/tests/unit/test__pandas_helpers.py
+++ b/tests/unit/test__pandas_helpers.py
@@ -19,7 +19,11 @@
import operator
import queue
import warnings
-import pkg_resources
+
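+# Prefer the standard library importlib.metadata; fall back to the
+# importlib_metadata backport on older Python versions.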
+try:
+ import importlib.metadata as metadata
+except ImportError:
+ import importlib_metadata as metadata
import mock
@@ -57,13 +61,10 @@
bigquery_storage = _versions_helpers.BQ_STORAGE_VERSIONS.try_import()
-PANDAS_MINIUM_VERSION = pkg_resources.parse_version("1.0.0")
-
if pandas is not None:
- PANDAS_INSTALLED_VERSION = pkg_resources.get_distribution("pandas").parsed_version
+ PANDAS_INSTALLED_VERSION = metadata.version("pandas")
else:
- # Set to less than MIN version.
- PANDAS_INSTALLED_VERSION = pkg_resources.parse_version("0.0.0")
+ PANDAS_INSTALLED_VERSION = "0.0.0"
skip_if_no_bignumeric = pytest.mark.skipif(
@@ -542,9 +543,7 @@ def test_bq_to_arrow_array_w_nullable_scalars(module_under_test, bq_type, rows):
],
)
@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
-@pytest.mark.skipif(
- PANDAS_INSTALLED_VERSION >= pkg_resources.parse_version("2.0.0"), reason=""
-)
+@pytest.mark.skipif(PANDAS_INSTALLED_VERSION[0:2] not in ["0.", "1."], reason="")
@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`")
def test_bq_to_arrow_array_w_pandas_timestamp(module_under_test, bq_type, rows):
rows = [pandas.Timestamp(row) for row in rows]
@@ -806,10 +805,7 @@ def test_list_columns_and_indexes_with_named_index_same_as_column_name(
assert columns_and_indexes == expected
-@pytest.mark.skipif(
- pandas is None or PANDAS_INSTALLED_VERSION < PANDAS_MINIUM_VERSION,
- reason="Requires `pandas version >= 1.0.0` which introduces pandas.NA",
-)
+@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
def test_dataframe_to_json_generator(module_under_test):
utcnow = datetime.datetime.utcnow()
df_data = collections.OrderedDict(
@@ -837,16 +833,8 @@ def test_dataframe_to_json_generator(module_under_test):
assert list(rows) == expected
+@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
def test_dataframe_to_json_generator_repeated_field(module_under_test):
- pytest.importorskip(
- "pandas",
- minversion=str(PANDAS_MINIUM_VERSION),
- reason=(
- f"Requires `pandas version >= {PANDAS_MINIUM_VERSION}` "
- "which introduces pandas.NA"
- ),
- )
-
df_data = [
collections.OrderedDict(
[("repeated_col", [pandas.NA, 2, None, 4]), ("not_repeated_col", "first")]
@@ -1017,30 +1005,41 @@ def test_dataframe_to_arrow_with_required_fields(module_under_test):
)
data = {
- "field01": ["hello", "world"],
- "field02": [b"abd", b"efg"],
- "field03": [1, 2],
- "field04": [3, 4],
- "field05": [1.25, 9.75],
- "field06": [-1.75, -3.5],
- "field07": [decimal.Decimal("1.2345"), decimal.Decimal("6.7891")],
+ "field01": ["hello", None, "world"],
+ "field02": [b"abd", b"efg", b"hij"],
+ "field03": [1, 2, 3],
+ "field04": [4, None, 5],
+ "field05": [1.25, 0.0, 9.75],
+ "field06": [-1.75, None, -3.5],
+ "field07": [
+ decimal.Decimal("1.2345"),
+ decimal.Decimal("6.7891"),
+ -decimal.Decimal("10.111213"),
+ ],
"field08": [
decimal.Decimal("-{d38}.{d38}".format(d38="9" * 38)),
+ None,
decimal.Decimal("{d38}.{d38}".format(d38="9" * 38)),
],
- "field09": [True, False],
- "field10": [False, True],
+ "field09": [True, False, True],
+ "field10": [False, True, None],
"field11": [
datetime.datetime(1970, 1, 1, 0, 0, 0, tzinfo=datetime.timezone.utc),
datetime.datetime(2012, 12, 21, 9, 7, 42, tzinfo=datetime.timezone.utc),
+ datetime.datetime(2022, 7, 14, 23, 59, 59, tzinfo=datetime.timezone.utc),
],
- "field12": [datetime.date(9999, 12, 31), datetime.date(1970, 1, 1)],
- "field13": [datetime.time(23, 59, 59, 999999), datetime.time(12, 0, 0)],
+ "field12": [datetime.date(9999, 12, 31), None, datetime.date(1970, 1, 1)],
+ "field13": [datetime.time(23, 59, 59, 999999), None, datetime.time(12, 0, 0)],
"field14": [
datetime.datetime(1970, 1, 1, 0, 0, 0),
+ None,
datetime.datetime(2012, 12, 21, 9, 7, 42),
],
- "field15": ["POINT(30 10)", "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))"],
+ "field15": [
+ None,
+ "POINT(30 10)",
+ "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))",
+ ],
}
dataframe = pandas.DataFrame(data)
@@ -1049,7 +1048,11 @@ def test_dataframe_to_arrow_with_required_fields(module_under_test):
assert len(arrow_schema) == len(bq_schema)
for arrow_field in arrow_schema:
- assert not arrow_field.nullable
+        # Even if the remote schema is REQUIRED, there's a chance there are
+ # local NULL values. Arrow will gladly interpret these NULL values
+ # as non-NULL and give you an arbitrary value. See:
+ # https://github.com/googleapis/python-bigquery/issues/1692
+ assert arrow_field.nullable
@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
@@ -1101,7 +1104,11 @@ def test_dataframe_to_arrow_dict_sequence_schema(module_under_test):
arrow_schema = arrow_table.schema
expected_fields = [
- pyarrow.field("field01", "string", nullable=False),
+            # Even if the remote schema is REQUIRED, there's a chance there are
+ # local NULL values. Arrow will gladly interpret these NULL values
+ # as non-NULL and give you an arbitrary value. See:
+ # https://github.com/googleapis/python-bigquery/issues/1692
+ pyarrow.field("field01", "string", nullable=True),
pyarrow.field("field02", "bool", nullable=True),
]
assert list(arrow_schema) == expected_fields
diff --git a/tests/unit/test__versions_helpers.py b/tests/unit/test__versions_helpers.py
index 144f14b7c..afe170e7a 100644
--- a/tests/unit/test__versions_helpers.py
+++ b/tests/unit/test__versions_helpers.py
@@ -26,6 +26,11 @@
except ImportError: # pragma: NO COVER
bigquery_storage = None
+try:
+ import pandas # type: ignore
+except ImportError: # pragma: NO COVER
+ pandas = None
+
from google.cloud.bigquery import _versions_helpers
from google.cloud.bigquery import exceptions
@@ -173,3 +178,49 @@ def test_bqstorage_is_read_session_optional_false():
bqstorage_versions = _versions_helpers.BQStorageVersions()
with mock.patch("google.cloud.bigquery_storage.__version__", new="2.5.0"):
assert not bqstorage_versions.is_read_session_optional
+
+
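+# PandasVersions.try_import should accept recent pandas releases and treat
+# 1.0.0 as a legacy version: returning None by default, or raising
+# LegacyPandasError when raise_if_error=True.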
+@pytest.mark.skipif(pandas is None, reason="pandas is not installed")
+@pytest.mark.parametrize("version", ["1.5.0", "2.0.0", "2.1.0"])
+def test_try_import_raises_no_error_w_recent_pandas(version):
+ versions = _versions_helpers.PandasVersions()
+ with mock.patch("pandas.__version__", new=version):
+ try:
+ pandas = versions.try_import(raise_if_error=True)
+ assert pandas is not None
+ except exceptions.LegacyPandasError: # pragma: NO COVER
+            raise AssertionError(
+                "Legacy error raised with a non-legacy dependency version."
+            )
+
+
+@pytest.mark.skipif(pandas is None, reason="pandas is not installed")
+def test_try_import_returns_none_w_legacy_pandas():
+ versions = _versions_helpers.PandasVersions()
+ with mock.patch("pandas.__version__", new="1.0.0"):
+ pandas = versions.try_import()
+ assert pandas is None
+
+
+@pytest.mark.skipif(pandas is None, reason="pandas is not installed")
+def test_try_import_raises_error_w_legacy_pandas():
+ versions = _versions_helpers.PandasVersions()
+ with mock.patch("pandas.__version__", new="1.0.0"):
+ with pytest.raises(exceptions.LegacyPandasError):
+ versions.try_import(raise_if_error=True)
+
+
+@pytest.mark.skipif(pandas is None, reason="pandas is not installed")
+def test_installed_pandas_version_returns_cached():
+ versions = _versions_helpers.PandasVersions()
+ versions._installed_version = object()
+ assert versions.installed_version is versions._installed_version
+
+
+@pytest.mark.skipif(pandas is None, reason="pandas is not installed")
+def test_installed_pandas_version_returns_parsed_version():
+ versions = _versions_helpers.PandasVersions()
+ with mock.patch("pandas.__version__", new="1.1.0"):
+ version = versions.installed_version
+
+ assert version.major == 1
+ assert version.minor == 1
+ assert version.micro == 0
diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py
index d470bd9fd..c8968adbb 100644
--- a/tests/unit/test_client.py
+++ b/tests/unit/test_client.py
@@ -30,7 +30,11 @@
import requests
import packaging
import pytest
-import pkg_resources
+
+try:
+ import importlib.metadata as metadata
+except ImportError:
+ import importlib_metadata as metadata
try:
import pandas
@@ -66,8 +70,9 @@
from google.cloud.bigquery.dataset import DatasetReference
from google.cloud.bigquery import exceptions
-from google.cloud.bigquery.retry import DEFAULT_TIMEOUT
from google.cloud.bigquery import ParquetOptions
+from google.cloud.bigquery.retry import DEFAULT_TIMEOUT
+import google.cloud.bigquery.table
try:
from google.cloud import bigquery_storage
@@ -76,13 +81,10 @@
from test_utils.imports import maybe_fail_import
from tests.unit.helpers import make_connection
-PANDAS_MINIUM_VERSION = pkg_resources.parse_version("1.0.0")
-
if pandas is not None:
- PANDAS_INSTALLED_VERSION = pkg_resources.get_distribution("pandas").parsed_version
+ PANDAS_INSTALLED_VERSION = metadata.version("pandas")
else:
- # Set to less than MIN version.
- PANDAS_INSTALLED_VERSION = pkg_resources.parse_version("0.0.0")
+ PANDAS_INSTALLED_VERSION = "0.0.0"
def _make_credentials():
@@ -4952,20 +4954,17 @@ def test_query_w_client_default_config_no_incoming(self):
)
def test_query_w_invalid_default_job_config(self):
- job_id = "some-job-id"
- query = "select count(*) from persons"
creds = _make_credentials()
http = object()
default_job_config = object()
- client = self._make_one(
- project=self.PROJECT,
- credentials=creds,
- _http=http,
- default_query_job_config=default_job_config,
- )
with self.assertRaises(TypeError) as exc:
- client.query(query, job_id=job_id, location=self.LOCATION)
+ self._make_one(
+ project=self.PROJECT,
+ credentials=creds,
+ _http=http,
+ default_query_job_config=default_job_config,
+ )
self.assertIn("Expected an instance of QueryJobConfig", exc.exception.args[0])
def test_query_w_client_location(self):
@@ -5212,6 +5211,150 @@ def test_query_job_rpc_fail_w_conflict_random_id_job_fetch_succeeds(self):
assert result is mock.sentinel.query_job
+ def test_query_and_wait_defaults(self):
+ query = "select count(*) from `bigquery-public-data.usa_names.usa_1910_2013`"
+ jobs_query_response = {
+ "jobComplete": True,
+ "schema": {
+ "fields": [
+ {
+ "name": "f0_",
+ "type": "INTEGER",
+ "mode": "NULLABLE",
+ },
+ ],
+ },
+ "totalRows": "1",
+ "rows": [{"f": [{"v": "5552452"}]}],
+ "queryId": "job_abcDEF_",
+ }
+ creds = _make_credentials()
+ http = object()
+ client = self._make_one(project=self.PROJECT, credentials=creds, _http=http)
+ conn = client._connection = make_connection(jobs_query_response)
+
+ rows = client.query_and_wait(query)
+
+ self.assertIsInstance(rows, google.cloud.bigquery.table.RowIterator)
+ self.assertEqual(rows.query_id, "job_abcDEF_")
+ self.assertEqual(rows.total_rows, 1)
+ # No job reference in the response should be OK for completed query.
+ self.assertIsNone(rows.job_id)
+ self.assertIsNone(rows.project)
+ self.assertIsNone(rows.location)
+
+ # Verify the request we send is to jobs.query.
+ conn.api_request.assert_called_once()
+ _, req = conn.api_request.call_args
+ self.assertEqual(req["method"], "POST")
+ self.assertEqual(req["path"], "/projects/PROJECT/queries")
+ self.assertEqual(req["timeout"], DEFAULT_TIMEOUT)
+ sent = req["data"]
+ self.assertEqual(sent["query"], query)
+ self.assertFalse(sent["useLegacySql"])
+
+ def test_query_and_wait_w_default_query_job_config(self):
+ from google.cloud.bigquery import job
+
+ query = "select count(*) from `bigquery-public-data.usa_names.usa_1910_2013`"
+ jobs_query_response = {
+ "jobComplete": True,
+ }
+ creds = _make_credentials()
+ http = object()
+ client = self._make_one(
+ project=self.PROJECT,
+ credentials=creds,
+ _http=http,
+ default_query_job_config=job.QueryJobConfig(
+ labels={
+ "default-label": "default-value",
+ },
+ ),
+ )
+ conn = client._connection = make_connection(jobs_query_response)
+
+ _ = client.query_and_wait(query)
+
+ # Verify the request we send is to jobs.query.
+ conn.api_request.assert_called_once()
+ _, req = conn.api_request.call_args
+ self.assertEqual(req["method"], "POST")
+ self.assertEqual(req["path"], f"/projects/{self.PROJECT}/queries")
+ sent = req["data"]
+ self.assertEqual(sent["labels"], {"default-label": "default-value"})
+
+ def test_query_and_wait_w_job_config(self):
+ from google.cloud.bigquery import job
+
+ query = "select count(*) from `bigquery-public-data.usa_names.usa_1910_2013`"
+ jobs_query_response = {
+ "jobComplete": True,
+ }
+ creds = _make_credentials()
+ http = object()
+ client = self._make_one(
+ project=self.PROJECT,
+ credentials=creds,
+ _http=http,
+ )
+ conn = client._connection = make_connection(jobs_query_response)
+
+ _ = client.query_and_wait(
+ query,
+ job_config=job.QueryJobConfig(
+ labels={
+ "job_config-label": "job_config-value",
+ },
+ ),
+ )
+
+ # Verify the request we send is to jobs.query.
+ conn.api_request.assert_called_once()
+ _, req = conn.api_request.call_args
+ self.assertEqual(req["method"], "POST")
+ self.assertEqual(req["path"], f"/projects/{self.PROJECT}/queries")
+ sent = req["data"]
+ self.assertEqual(sent["labels"], {"job_config-label": "job_config-value"})
+
+ def test_query_and_wait_w_location(self):
+ query = "select count(*) from `bigquery-public-data.usa_names.usa_1910_2013`"
+ jobs_query_response = {
+ "jobComplete": True,
+ }
+ creds = _make_credentials()
+ http = object()
+ client = self._make_one(project=self.PROJECT, credentials=creds, _http=http)
+ conn = client._connection = make_connection(jobs_query_response)
+
+ _ = client.query_and_wait(query, location="not-the-client-location")
+
+ # Verify the request we send is to jobs.query.
+ conn.api_request.assert_called_once()
+ _, req = conn.api_request.call_args
+ self.assertEqual(req["method"], "POST")
+ self.assertEqual(req["path"], f"/projects/{self.PROJECT}/queries")
+ sent = req["data"]
+ self.assertEqual(sent["location"], "not-the-client-location")
+
+ def test_query_and_wait_w_project(self):
+ query = "select count(*) from `bigquery-public-data.usa_names.usa_1910_2013`"
+ jobs_query_response = {
+ "jobComplete": True,
+ }
+ creds = _make_credentials()
+ http = object()
+ client = self._make_one(project=self.PROJECT, credentials=creds, _http=http)
+ conn = client._connection = make_connection(jobs_query_response)
+
+ _ = client.query_and_wait(query, project="not-the-client-project")
+
+ # Verify the request we send is to jobs.query.
+ conn.api_request.assert_called_once()
+ _, req = conn.api_request.call_args
+ self.assertEqual(req["method"], "POST")
+ self.assertEqual(req["path"], "/projects/not-the-client-project/queries")
+
def test_insert_rows_w_timeout(self):
from google.cloud.bigquery.schema import SchemaField
from google.cloud.bigquery.table import Table
@@ -6401,11 +6544,16 @@ def test_list_rows(self):
age = SchemaField("age", "INTEGER", mode="NULLABLE")
joined = SchemaField("joined", "TIMESTAMP", mode="NULLABLE")
table = Table(self.TABLE_REF, schema=[full_name, age, joined])
+ table._properties["location"] = "us-central1"
table._properties["numRows"] = 7
iterator = client.list_rows(table, timeout=7.5)
- # Check that initial total_rows is populated from the table.
+ # Check that initial RowIterator is populated from the table metadata.
+ self.assertIsNone(iterator.job_id)
+ self.assertEqual(iterator.location, "us-central1")
+ self.assertEqual(iterator.project, table.project)
+ self.assertIsNone(iterator.query_id)
self.assertEqual(iterator.total_rows, 7)
page = next(iterator.pages)
rows = list(page)
@@ -6521,6 +6669,10 @@ def test_list_rows_empty_table(self):
selected_fields=[],
)
+ self.assertIsNone(rows.job_id)
+ self.assertIsNone(rows.location)
+ self.assertEqual(rows.project, self.TABLE_REF.project)
+ self.assertIsNone(rows.query_id)
# When a table reference / string and selected_fields is provided,
# total_rows can't be populated until iteration starts.
self.assertIsNone(rows.total_rows)
@@ -8136,10 +8288,7 @@ def test_load_table_from_dataframe_unknown_table(self):
timeout=DEFAULT_TIMEOUT,
)
- @unittest.skipIf(
- pandas is None or PANDAS_INSTALLED_VERSION < PANDAS_MINIUM_VERSION,
- "Only `pandas version >=1.0.0` supported",
- )
+ @unittest.skipIf(pandas is None, "Requires `pandas`")
@unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
def test_load_table_from_dataframe_w_nullable_int64_datatype(self):
from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES
@@ -8184,10 +8333,7 @@ def test_load_table_from_dataframe_w_nullable_int64_datatype(self):
SchemaField("x", "INT64", "NULLABLE", None),
)
- @unittest.skipIf(
- pandas is None or PANDAS_INSTALLED_VERSION < PANDAS_MINIUM_VERSION,
- "Only `pandas version >=1.0.0` supported",
- )
+ @unittest.skipIf(pandas is None, "Requires `pandas`")
# @unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
def test_load_table_from_dataframe_w_nullable_int64_datatype_automatic_schema(self):
from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES
diff --git a/tests/unit/test_create_dataset.py b/tests/unit/test_create_dataset.py
index 3b2e644d9..8374e6e0a 100644
--- a/tests/unit/test_create_dataset.py
+++ b/tests/unit/test_create_dataset.py
@@ -466,3 +466,82 @@ def test_create_dataset_with_default_rounding_mode_if_value_is_in_possible_value
},
timeout=DEFAULT_TIMEOUT,
)
+
+
+def test_create_dataset_with_max_time_travel_hours(PROJECT, DS_ID, LOCATION):
+ path = "/projects/%s/datasets" % PROJECT
+ max_time_travel_hours = 24 * 3
+
+ resource = {
+ "datasetReference": {"projectId": PROJECT, "datasetId": DS_ID},
+ "etag": "etag",
+ "id": "{}:{}".format(PROJECT, DS_ID),
+ "location": LOCATION,
+ "maxTimeTravelHours": max_time_travel_hours,
+ }
+ client = make_client(location=LOCATION)
+ conn = client._connection = make_connection(resource)
+
+ ds_ref = DatasetReference(PROJECT, DS_ID)
+ before = Dataset(ds_ref)
+ before.max_time_travel_hours = max_time_travel_hours
+ after = client.create_dataset(before)
+ assert after.dataset_id == DS_ID
+ assert after.project == PROJECT
+ assert after.max_time_travel_hours == max_time_travel_hours
+
+ conn.api_request.assert_called_once_with(
+ method="POST",
+ path=path,
+ data={
+ "datasetReference": {"projectId": PROJECT, "datasetId": DS_ID},
+ "labels": {},
+ "location": LOCATION,
+ "maxTimeTravelHours": max_time_travel_hours,
+ },
+ timeout=DEFAULT_TIMEOUT,
+ )
+
+
+def test_create_dataset_with_max_time_travel_hours_not_multiple_of_24(
+ PROJECT, DS_ID, LOCATION
+):
+ ds_ref = DatasetReference(PROJECT, DS_ID)
+ dataset = Dataset(ds_ref)
+ with pytest.raises(ValueError) as e:
+ dataset.max_time_travel_hours = 50
+ assert str(e.value) == "Time Travel Window should be multiple of 24"
+
+
+def test_create_dataset_with_max_time_travel_hours_is_less_than_2_days(
+ PROJECT, DS_ID, LOCATION
+):
+ ds_ref = DatasetReference(PROJECT, DS_ID)
+ dataset = Dataset(ds_ref)
+ with pytest.raises(ValueError) as e:
+ dataset.max_time_travel_hours = 24
+ assert (
+ str(e.value)
+ == "Time Travel Window should be from 48 to 168 hours (2 to 7 days)"
+ )
+
+
+def test_create_dataset_with_max_time_travel_hours_is_greater_than_7_days(
+ PROJECT, DS_ID, LOCATION
+):
+ ds_ref = DatasetReference(PROJECT, DS_ID)
+ dataset = Dataset(ds_ref)
+ with pytest.raises(ValueError) as e:
+ dataset.max_time_travel_hours = 192
+ assert (
+ str(e.value)
+ == "Time Travel Window should be from 48 to 168 hours (2 to 7 days)"
+ )
+
+
+def test_create_dataset_with_max_time_travel_hours_is_not_int(PROJECT, DS_ID, LOCATION):
+ ds_ref = DatasetReference(PROJECT, DS_ID)
+ dataset = Dataset(ds_ref)
+ with pytest.raises(ValueError) as e:
+ dataset.max_time_travel_hours = "50"
+ assert str(e.value) == "max_time_travel_hours must be an integer. Got 50"
diff --git a/tests/unit/test_dataset.py b/tests/unit/test_dataset.py
index 0a709ab43..423349a51 100644
--- a/tests/unit/test_dataset.py
+++ b/tests/unit/test_dataset.py
@@ -744,6 +744,9 @@ def _verify_resource_properties(self, dataset, resource):
self.assertEqual(dataset.description, resource.get("description"))
self.assertEqual(dataset.friendly_name, resource.get("friendlyName"))
self.assertEqual(dataset.location, resource.get("location"))
+ self.assertEqual(
+ dataset.is_case_insensitive, resource.get("isCaseInsensitive") or False
+ )
if "defaultEncryptionConfiguration" in resource:
self.assertEqual(
dataset.default_encryption_configuration.kms_key_name,
@@ -781,6 +784,7 @@ def test_ctor_defaults(self):
self.assertIsNone(dataset.description)
self.assertIsNone(dataset.friendly_name)
self.assertIsNone(dataset.location)
+ self.assertEqual(dataset.is_case_insensitive, False)
def test_ctor_string(self):
dataset = self._make_one("some-project.some_dset")
@@ -818,6 +822,7 @@ def test_ctor_explicit(self):
self.assertIsNone(dataset.description)
self.assertIsNone(dataset.friendly_name)
self.assertIsNone(dataset.location)
+ self.assertEqual(dataset.is_case_insensitive, False)
def test_access_entries_setter_non_list(self):
dataset = self._make_one(self.DS_REF)
@@ -910,6 +915,26 @@ def test_labels_getter_missing_value(self):
dataset = self._make_one(self.DS_REF)
self.assertEqual(dataset.labels, {})
+ def test_is_case_insensitive_setter_bad_value(self):
+ dataset = self._make_one(self.DS_REF)
+ with self.assertRaises(ValueError):
+ dataset.is_case_insensitive = 0
+
+ def test_is_case_insensitive_setter_true(self):
+ dataset = self._make_one(self.DS_REF)
+ dataset.is_case_insensitive = True
+ self.assertEqual(dataset.is_case_insensitive, True)
+
+ def test_is_case_insensitive_setter_none(self):
+ dataset = self._make_one(self.DS_REF)
+ dataset.is_case_insensitive = None
+ self.assertEqual(dataset.is_case_insensitive, False)
+
+ def test_is_case_insensitive_setter_false(self):
+ dataset = self._make_one(self.DS_REF)
+ dataset.is_case_insensitive = False
+ self.assertEqual(dataset.is_case_insensitive, False)
+
def test_from_api_repr_missing_identity(self):
self._setUpConstants()
RESOURCE = {}
diff --git a/tests/unit/test_legacy_types.py b/tests/unit/test_legacy_types.py
index 3f51cc511..3431074fd 100644
--- a/tests/unit/test_legacy_types.py
+++ b/tests/unit/test_legacy_types.py
@@ -13,9 +13,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import pytest
+
import warnings
+try:
+ import proto # type: ignore
+except ImportError: # pragma: NO COVER
+ proto = None
+
+@pytest.mark.skipif(proto is None, reason="proto is not installed")
def test_importing_legacy_types_emits_warning():
with warnings.catch_warnings(record=True) as warned:
from google.cloud.bigquery_v2 import types # noqa: F401
diff --git a/tests/unit/test_packaging.py b/tests/unit/test_packaging.py
new file mode 100644
index 000000000..6f1b16c66
--- /dev/null
+++ b/tests/unit/test_packaging.py
@@ -0,0 +1,37 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import subprocess
+import sys
+
+
+def test_namespace_package_compat(tmp_path):
+ # The ``google`` namespace package should not be masked
+ # by the presence of ``google-cloud-bigquery``.
+ google = tmp_path / "google"
+ google.mkdir()
+ google.joinpath("othermod.py").write_text("")
+ env = dict(os.environ, PYTHONPATH=str(tmp_path))
+ cmd = [sys.executable, "-m", "google.othermod"]
+ subprocess.check_call(cmd, env=env)
+
+ # The ``google.cloud`` namespace package should not be masked
+ # by the presence of ``google-cloud-bigquery``.
+ google_cloud = tmp_path / "google" / "cloud"
+ google_cloud.mkdir()
+ google_cloud.joinpath("othermod.py").write_text("")
+ env = dict(os.environ, PYTHONPATH=str(tmp_path))
+ cmd = [sys.executable, "-m", "google.cloud.othermod"]
+ subprocess.check_call(cmd, env=env)
diff --git a/tests/unit/test_query.py b/tests/unit/test_query.py
index 4b687152f..1704abac7 100644
--- a/tests/unit/test_query.py
+++ b/tests/unit/test_query.py
@@ -1281,7 +1281,6 @@ def test___repr__(self):
field1 = self._make_one("test", _make_subparam("field1", "STRING", "hello"))
got = repr(field1)
self.assertIn("StructQueryParameter", got)
- self.assertIn("'field1', 'STRING'", got)
self.assertIn("'field1': 'hello'", got)
@@ -1362,13 +1361,13 @@ def test_errors_present(self):
self.assertEqual(query.errors, ERRORS)
def test_job_id_missing(self):
- with self.assertRaises(ValueError):
- self._make_one({})
+ query = self._make_one({})
+ self.assertIsNone(query.job_id)
def test_job_id_broken_job_reference(self):
resource = {"jobReference": {"bogus": "BOGUS"}}
- with self.assertRaises(ValueError):
- self._make_one(resource)
+ query = self._make_one(resource)
+ self.assertIsNone(query.job_id)
def test_job_id_present(self):
resource = self._make_resource()
@@ -1376,6 +1375,16 @@ def test_job_id_present(self):
query = self._make_one(resource)
self.assertEqual(query.job_id, "custom-job")
+ def test_location_missing(self):
+ query = self._make_one({})
+ self.assertIsNone(query.location)
+
+ def test_location_present(self):
+ resource = self._make_resource()
+ resource["jobReference"]["location"] = "test-location"
+ query = self._make_one(resource)
+ self.assertEqual(query.location, "test-location")
+
def test_page_token_missing(self):
query = self._make_one(self._make_resource())
self.assertIsNone(query.page_token)
@@ -1386,6 +1395,16 @@ def test_page_token_present(self):
query = self._make_one(resource)
self.assertEqual(query.page_token, "TOKEN")
+ def test_query_id_missing(self):
+ query = self._make_one(self._make_resource())
+ self.assertIsNone(query.query_id)
+
+ def test_query_id_present(self):
+ resource = self._make_resource()
+ resource["queryId"] = "test-query-id"
+ query = self._make_one(resource)
+ self.assertEqual(query.query_id, "test-query-id")
+
def test_total_rows_present_integer(self):
resource = self._make_resource()
resource["totalRows"] = 42
diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py
index 60d04de89..1109b7ff2 100644
--- a/tests/unit/test_retry.py
+++ b/tests/unit/test_retry.py
@@ -125,6 +125,7 @@ def test_DEFAULT_JOB_RETRY_predicate():
def test_DEFAULT_JOB_RETRY_deadline():
- from google.cloud.bigquery.retry import DEFAULT_JOB_RETRY
+ from google.cloud.bigquery.retry import DEFAULT_JOB_RETRY, DEFAULT_RETRY
- assert DEFAULT_JOB_RETRY._deadline == 600
+ # Make sure we can retry the job at least once.
+ assert DEFAULT_JOB_RETRY._deadline > DEFAULT_RETRY._deadline
diff --git a/tests/unit/test_table.py b/tests/unit/test_table.py
index fa2f30cea..9b3d4fe84 100644
--- a/tests/unit/test_table.py
+++ b/tests/unit/test_table.py
@@ -22,9 +22,13 @@
import warnings
import mock
-import pkg_resources
import pytest
+try:
+ import importlib.metadata as metadata
+except ImportError:
+ import importlib_metadata as metadata
+
import google.api_core.exceptions
from test_utils.imports import maybe_fail_import
@@ -71,9 +75,9 @@
tqdm = None
if pandas is not None:
- PANDAS_INSTALLED_VERSION = pkg_resources.get_distribution("pandas").parsed_version
+ PANDAS_INSTALLED_VERSION = metadata.version("pandas")
else:
- PANDAS_INSTALLED_VERSION = pkg_resources.parse_version("0.0.0")
+ PANDAS_INSTALLED_VERSION = "0.0.0"
def _mock_client():
@@ -2113,6 +2117,38 @@ def test_constructor_with_dict_schema(self):
]
self.assertEqual(iterator.schema, expected_schema)
+ def test_job_id_missing(self):
+ rows = self._make_one()
+ self.assertIsNone(rows.job_id)
+
+ def test_job_id_present(self):
+ rows = self._make_one(job_id="abc-123")
+ self.assertEqual(rows.job_id, "abc-123")
+
+ def test_location_missing(self):
+ rows = self._make_one()
+ self.assertIsNone(rows.location)
+
+ def test_location_present(self):
+ rows = self._make_one(location="asia-northeast1")
+ self.assertEqual(rows.location, "asia-northeast1")
+
+ def test_project_missing(self):
+ rows = self._make_one()
+ self.assertIsNone(rows.project)
+
+ def test_project_present(self):
+ rows = self._make_one(project="test-project")
+ self.assertEqual(rows.project, "test-project")
+
+ def test_query_id_missing(self):
+ rows = self._make_one()
+ self.assertIsNone(rows.query_id)
+
+ def test_query_id_present(self):
+ rows = self._make_one(query_id="xyz-987")
+ self.assertEqual(rows.query_id, "xyz-987")
+
def test_iterate(self):
from google.cloud.bigquery.schema import SchemaField
@@ -2165,9 +2201,18 @@ def test_iterate_with_cached_first_page(self):
path = "/foo"
api_request = mock.Mock(return_value={"rows": rows})
row_iterator = self._make_one(
- _mock_client(), api_request, path, schema, first_page_response=first_page
+ _mock_client(),
+ api_request,
+ path,
+ schema,
+ first_page_response=first_page,
+ total_rows=4,
)
+ self.assertEqual(row_iterator.total_rows, 4)
rows = list(row_iterator)
+ # Total rows should be maintained, even though subsequent API calls
+ # don't include it.
+ self.assertEqual(row_iterator.total_rows, 4)
self.assertEqual(len(rows), 4)
self.assertEqual(rows[0].age, 27)
self.assertEqual(rows[1].age, 28)
@@ -2178,6 +2223,39 @@ def test_iterate_with_cached_first_page(self):
method="GET", path=path, query_params={"pageToken": "next-page"}
)
+ def test_iterate_with_cached_first_page_max_results(self):
+ from google.cloud.bigquery.schema import SchemaField
+
+ first_page = {
+ "rows": [
+ {"f": [{"v": "Whillma Phlyntstone"}, {"v": "27"}]},
+ {"f": [{"v": "Bhetty Rhubble"}, {"v": "28"}]},
+ {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]},
+ {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]},
+ ],
+ "pageToken": "next-page",
+ }
+ schema = [
+ SchemaField("name", "STRING", mode="REQUIRED"),
+ SchemaField("age", "INTEGER", mode="REQUIRED"),
+ ]
+ path = "/foo"
+ api_request = mock.Mock(return_value=first_page)
+ row_iterator = self._make_one(
+ _mock_client(),
+ api_request,
+ path,
+ schema,
+ max_results=3,
+ first_page_response=first_page,
+ )
+ rows = list(row_iterator)
+ self.assertEqual(len(rows), 3)
+ self.assertEqual(rows[0].age, 27)
+ self.assertEqual(rows[1].age, 28)
+ self.assertEqual(rows[2].age, 32)
+ api_request.assert_not_called()
+
def test_page_size(self):
from google.cloud.bigquery.schema import SchemaField
@@ -2203,19 +2281,58 @@ def test_page_size(self):
query_params={"maxResults": row_iterator._page_size},
)
- def test__is_completely_cached_returns_false_without_first_page(self):
+ def test__is_almost_completely_cached_returns_false_without_first_page(self):
iterator = self._make_one(first_page_response=None)
- self.assertFalse(iterator._is_completely_cached())
+ self.assertFalse(iterator._is_almost_completely_cached())
- def test__is_completely_cached_returns_false_with_page_token(self):
- first_page = {"pageToken": "next-page"}
+ def test__is_almost_completely_cached_returns_true_with_more_rows_than_max_results(
+ self,
+ ):
+ rows = [
+ {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]},
+ {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]},
+ {"f": [{"v": "Whillma Phlyntstone"}, {"v": "27"}]},
+ {"f": [{"v": "Bhetty Rhubble"}, {"v": "28"}]},
+ ]
+ first_page = {"pageToken": "next-page", "rows": rows}
+ iterator = self._make_one(max_results=4, first_page_response=first_page)
+ self.assertTrue(iterator._is_almost_completely_cached())
+
+ def test__is_almost_completely_cached_returns_false_with_too_many_rows_remaining(
+ self,
+ ):
+ rows = [
+ {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]},
+ {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]},
+ ]
+ first_page = {"pageToken": "next-page", "rows": rows}
+ iterator = self._make_one(first_page_response=first_page, total_rows=100)
+ self.assertFalse(iterator._is_almost_completely_cached())
+
+ def test__is_almost_completely_cached_returns_false_with_rows_remaining_and_no_total_rows(
+ self,
+ ):
+ rows = [
+ {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]},
+ {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]},
+ ]
+ first_page = {"pageToken": "next-page", "rows": rows}
iterator = self._make_one(first_page_response=first_page)
- self.assertFalse(iterator._is_completely_cached())
+ self.assertFalse(iterator._is_almost_completely_cached())
+
+ def test__is_almost_completely_cached_returns_true_with_some_rows_remaining(self):
+ rows = [
+ {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]},
+ {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]},
+ ]
+ first_page = {"pageToken": "next-page", "rows": rows}
+ iterator = self._make_one(first_page_response=first_page, total_rows=6)
+ self.assertTrue(iterator._is_almost_completely_cached())
- def test__is_completely_cached_returns_true(self):
+ def test__is_almost_completely_cached_returns_true_with_no_rows_remaining(self):
first_page = {"rows": []}
iterator = self._make_one(first_page_response=first_page)
- self.assertTrue(iterator._is_completely_cached())
+ self.assertTrue(iterator._is_almost_completely_cached())
def test__validate_bqstorage_returns_false_when_completely_cached(self):
first_page = {"rows": []}
@@ -2226,6 +2343,25 @@ def test__validate_bqstorage_returns_false_when_completely_cached(self):
)
)
+ @unittest.skipIf(
+ bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
+ )
+ def test__validate_bqstorage_returns_true_if_no_cached_results(self):
+ iterator = self._make_one(first_page_response=None) # not cached
+ result = iterator._validate_bqstorage(
+ bqstorage_client=None, create_bqstorage_client=True
+ )
+ self.assertTrue(result)
+
+ def test__validate_bqstorage_returns_false_if_page_token_set(self):
+ iterator = self._make_one(
+ page_token="abc", first_page_response=None # not cached
+ )
+ result = iterator._validate_bqstorage(
+ bqstorage_client=None, create_bqstorage_client=True
+ )
+ self.assertFalse(result)
+
def test__validate_bqstorage_returns_false_if_max_results_set(self):
iterator = self._make_one(
max_results=10, first_page_response=None # not cached
@@ -3670,9 +3806,7 @@ def test_to_dataframe_w_dtypes_mapper(self):
self.assertEqual(df.timestamp.dtype.name, "object")
@unittest.skipIf(pandas is None, "Requires `pandas`")
- @pytest.mark.skipif(
- PANDAS_INSTALLED_VERSION >= pkg_resources.parse_version("2.0.0"), reason=""
- )
+ @pytest.mark.skipif(PANDAS_INSTALLED_VERSION[0:2] not in ["0.", "1."], reason="")
def test_to_dataframe_w_none_dtypes_mapper(self):
from google.cloud.bigquery.schema import SchemaField
@@ -3785,9 +3919,7 @@ def test_to_dataframe_w_unsupported_dtypes_mapper(self):
)
@unittest.skipIf(pandas is None, "Requires `pandas`")
- @pytest.mark.skipif(
- PANDAS_INSTALLED_VERSION >= pkg_resources.parse_version("2.0.0"), reason=""
- )
+ @pytest.mark.skipif(PANDAS_INSTALLED_VERSION[0:2] not in ["0.", "1."], reason="")
def test_to_dataframe_column_dtypes(self):
from google.cloud.bigquery.schema import SchemaField
diff --git a/tests/unit/test_table_arrow.py b/tests/unit/test_table_arrow.py
new file mode 100644
index 000000000..6f1e6f76a
--- /dev/null
+++ b/tests/unit/test_table_arrow.py
@@ -0,0 +1,134 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from google.cloud import bigquery
+import google.cloud.bigquery.table
+
+
+pyarrow = pytest.importorskip("pyarrow", minversion="3.0.0")
+
+
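+# These tests build a RowIterator directly from a cached jobs.query response
+# (client and api_request are None), so to_arrow() must succeed without
+# issuing any API calls.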
+def test_to_arrow_with_jobs_query_response():
+ resource = {
+ "kind": "bigquery#queryResponse",
+ "schema": {
+ "fields": [
+ {"name": "name", "type": "STRING", "mode": "NULLABLE"},
+ {"name": "number", "type": "INTEGER", "mode": "NULLABLE"},
+ ]
+ },
+ "jobReference": {
+ "projectId": "test-project",
+ "jobId": "job_ocd3cb-N62QIslU7R5qKKa2_427J",
+ "location": "US",
+ },
+ "totalRows": "9",
+ "rows": [
+ {"f": [{"v": "Tiarra"}, {"v": "6"}]},
+ {"f": [{"v": "Timothy"}, {"v": "325"}]},
+ {"f": [{"v": "Tina"}, {"v": "26"}]},
+ {"f": [{"v": "Tierra"}, {"v": "10"}]},
+ {"f": [{"v": "Tia"}, {"v": "17"}]},
+ {"f": [{"v": "Tiara"}, {"v": "22"}]},
+ {"f": [{"v": "Tiana"}, {"v": "6"}]},
+ {"f": [{"v": "Tiffany"}, {"v": "229"}]},
+ {"f": [{"v": "Tiffani"}, {"v": "8"}]},
+ ],
+ "totalBytesProcessed": "154775150",
+ "jobComplete": True,
+ "cacheHit": False,
+ "queryId": "job_ocd3cb-N62QIslU7R5qKKa2_427J",
+ }
+
+ rows = google.cloud.bigquery.table.RowIterator(
+ client=None,
+ api_request=None,
+ path=None,
+ schema=[
+ bigquery.SchemaField.from_api_repr(field)
+ for field in resource["schema"]["fields"]
+ ],
+ first_page_response=resource,
+ )
+ records = rows.to_arrow()
+
+ assert records.column_names == ["name", "number"]
+ assert records["name"].to_pylist() == [
+ "Tiarra",
+ "Timothy",
+ "Tina",
+ "Tierra",
+ "Tia",
+ "Tiara",
+ "Tiana",
+ "Tiffany",
+ "Tiffani",
+ ]
+ assert records["number"].to_pylist() == [6, 325, 26, 10, 17, 22, 6, 229, 8]
+
+
+def test_to_arrow_with_jobs_query_response_and_max_results():
+ resource = {
+ "kind": "bigquery#queryResponse",
+ "schema": {
+ "fields": [
+ {"name": "name", "type": "STRING", "mode": "NULLABLE"},
+ {"name": "number", "type": "INTEGER", "mode": "NULLABLE"},
+ ]
+ },
+ "jobReference": {
+ "projectId": "test-project",
+ "jobId": "job_ocd3cb-N62QIslU7R5qKKa2_427J",
+ "location": "US",
+ },
+ "totalRows": "9",
+ "rows": [
+ {"f": [{"v": "Tiarra"}, {"v": "6"}]},
+ {"f": [{"v": "Timothy"}, {"v": "325"}]},
+ {"f": [{"v": "Tina"}, {"v": "26"}]},
+ {"f": [{"v": "Tierra"}, {"v": "10"}]},
+ {"f": [{"v": "Tia"}, {"v": "17"}]},
+ {"f": [{"v": "Tiara"}, {"v": "22"}]},
+ {"f": [{"v": "Tiana"}, {"v": "6"}]},
+ {"f": [{"v": "Tiffany"}, {"v": "229"}]},
+ {"f": [{"v": "Tiffani"}, {"v": "8"}]},
+ ],
+ "totalBytesProcessed": "154775150",
+ "jobComplete": True,
+ "cacheHit": False,
+ "queryId": "job_ocd3cb-N62QIslU7R5qKKa2_427J",
+ }
+
+ rows = google.cloud.bigquery.table.RowIterator(
+ client=None,
+ api_request=None,
+ path=None,
+ schema=[
+ bigquery.SchemaField.from_api_repr(field)
+ for field in resource["schema"]["fields"]
+ ],
+ first_page_response=resource,
+ max_results=3,
+ )
+ records = rows.to_arrow()
+
+ assert records.column_names == ["name", "number"]
+ assert records["name"].to_pylist() == [
+ "Tiarra",
+ "Timothy",
+ "Tina",
+ ]
+ assert records["number"].to_pylist() == [6, 325, 26]
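
Both new tests construct the iterator with client=None and api_request=None, so everything returned by to_arrow() must come from the cached jobs.query response. If coverage of the resulting Arrow column types were wanted as well, a companion test in the same module (reusing its existing imports) could look like the sketch below; the test name, the trimmed resource, and the string/int64 expectations are assumptions of this sketch, not assertions made by the diff:

    def test_to_arrow_column_types_sketch():
        # Hypothetical follow-on test: the cached-response path is expected to
        # surface BigQuery STRING/INTEGER columns as Arrow string/int64 per the
        # client library's usual type mapping (not asserted anywhere in this diff).
        resource = {
            "kind": "bigquery#queryResponse",
            "schema": {
                "fields": [
                    {"name": "name", "type": "STRING", "mode": "NULLABLE"},
                    {"name": "number", "type": "INTEGER", "mode": "NULLABLE"},
                ]
            },
            "totalRows": "2",
            "rows": [
                {"f": [{"v": "Tina"}, {"v": "26"}]},
                {"f": [{"v": "Tia"}, {"v": "17"}]},
            ],
            "jobComplete": True,
        }
        rows = google.cloud.bigquery.table.RowIterator(
            client=None,
            api_request=None,
            path=None,
            schema=[
                bigquery.SchemaField.from_api_repr(field)
                for field in resource["schema"]["fields"]
            ],
            first_page_response=resource,
        )
        records = rows.to_arrow()

        assert records.schema.field("name").type == pyarrow.string()
        assert records.schema.field("number").type == pyarrow.int64()
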
diff --git a/tests/unit/test_table_pandas.py b/tests/unit/test_table_pandas.py
index dfe512eea..b38568561 100644
--- a/tests/unit/test_table_pandas.py
+++ b/tests/unit/test_table_pandas.py
@@ -15,7 +15,11 @@
import datetime
import decimal
from unittest import mock
-import pkg_resources
+
+try:
+ import importlib.metadata as metadata
+except ImportError:
+ import importlib_metadata as metadata
import pytest
@@ -28,9 +32,9 @@
TEST_PATH = "/v1/project/test-proj/dataset/test-dset/table/test-tbl/data"
if pandas is not None: # pragma: NO COVER
- PANDAS_INSTALLED_VERSION = pkg_resources.get_distribution("pandas").parsed_version
+ PANDAS_INSTALLED_VERSION = metadata.version("pandas")
else: # pragma: NO COVER
- PANDAS_INSTALLED_VERSION = pkg_resources.parse_version("0.0.0")
+ PANDAS_INSTALLED_VERSION = "0.0.0"
@pytest.fixture
@@ -40,9 +44,7 @@ def class_under_test():
return RowIterator
-@pytest.mark.skipif(
- PANDAS_INSTALLED_VERSION >= pkg_resources.parse_version("2.0.0"), reason=""
-)
+@pytest.mark.skipif(PANDAS_INSTALLED_VERSION[0:2] not in ["0.", "1."], reason="")
def test_to_dataframe_nullable_scalars(monkeypatch, class_under_test):
# See tests/system/test_arrow.py for the actual types we get from the API.
arrow_schema = pyarrow.schema(
@@ -201,3 +203,62 @@ def test_to_dataframe_arrays(monkeypatch, class_under_test):
assert df.dtypes["int64_repeated"].name == "object"
assert tuple(df["int64_repeated"][0]) == (-1, 0, 2)
+
+
+def test_to_dataframe_with_jobs_query_response(class_under_test):
+ resource = {
+ "kind": "bigquery#queryResponse",
+ "schema": {
+ "fields": [
+ {"name": "name", "type": "STRING", "mode": "NULLABLE"},
+ {"name": "number", "type": "INTEGER", "mode": "NULLABLE"},
+ ]
+ },
+ "jobReference": {
+ "projectId": "test-project",
+ "jobId": "job_ocd3cb-N62QIslU7R5qKKa2_427J",
+ "location": "US",
+ },
+ "totalRows": "9",
+ "rows": [
+ {"f": [{"v": "Tiarra"}, {"v": "6"}]},
+ {"f": [{"v": "Timothy"}, {"v": "325"}]},
+ {"f": [{"v": "Tina"}, {"v": "26"}]},
+ {"f": [{"v": "Tierra"}, {"v": "10"}]},
+ {"f": [{"v": "Tia"}, {"v": "17"}]},
+ {"f": [{"v": "Tiara"}, {"v": "22"}]},
+ {"f": [{"v": "Tiana"}, {"v": "6"}]},
+ {"f": [{"v": "Tiffany"}, {"v": "229"}]},
+ {"f": [{"v": "Tiffani"}, {"v": "8"}]},
+ ],
+ "totalBytesProcessed": "154775150",
+ "jobComplete": True,
+ "cacheHit": False,
+ "queryId": "job_ocd3cb-N62QIslU7R5qKKa2_427J",
+ }
+
+ rows = class_under_test(
+ client=None,
+ api_request=None,
+ path=None,
+ schema=[
+ bigquery.SchemaField.from_api_repr(field)
+ for field in resource["schema"]["fields"]
+ ],
+ first_page_response=resource,
+ )
+ df = rows.to_dataframe()
+
+ assert list(df.columns) == ["name", "number"]
+ assert list(df["name"]) == [
+ "Tiarra",
+ "Timothy",
+ "Tina",
+ "Tierra",
+ "Tia",
+ "Tiara",
+ "Tiana",
+ "Tiffany",
+ "Tiffani",
+ ]
+ assert list(df["number"]) == [6, 325, 26, 10, 17, 22, 6, 229, 8]
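
Like test_table.py earlier in the diff, test_table_pandas.py now reads the installed pandas version through importlib.metadata (standard library on Python 3.8+), falling back to the importlib_metadata backport, and the result is a plain string rather than a parsed version object. Should ordered version comparisons ever be needed again, packaging.version can parse that string; the snippet below sketches that alternative and is not something this diff introduces:

    try:
        import importlib.metadata as metadata
    except ImportError:  # older interpreters: fall back to the PyPI backport
        import importlib_metadata as metadata

    from packaging import version  # hypothetical alternative; the diff itself sticks to string prefixes

    PANDAS_INSTALLED_VERSION = metadata.version("pandas")  # e.g. "2.1.4"

    # Ordered comparison equivalent to the old pkg_resources-based guard:
    skip_on_pandas_2_or_newer = version.parse(PANDAS_INSTALLED_VERSION) >= version.parse("2.0.0")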