diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index c701359..b8dcb4a 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:c6c965a4bf40c19011b11f87dbc801a66d3a23fbc6704102be064ef31c51f1c3 -# created: 2022-08-09T15:58:56.463048506Z + digest: sha256:993a058718e84a82fda04c3177e58f0a43281a996c7c395e0a56ccc4d6d210d7 diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh index 8acb14e..1c4d623 100755 --- a/.kokoro/publish-docs.sh +++ b/.kokoro/publish-docs.sh @@ -21,14 +21,12 @@ export PYTHONUNBUFFERED=1 export PATH="${HOME}/.local/bin:${PATH}" # Install nox -python3 -m pip install --user --upgrade --quiet nox +python3 -m pip install --require-hashes -r .kokoro/requirements.txt python3 -m nox --version # build docs nox -s docs -python3 -m pip install --user gcp-docuploader - # create metadata python3 -m docuploader create-metadata \ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ diff --git a/.kokoro/release.sh b/.kokoro/release.sh index cd76ff1..3c3380e 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -16,12 +16,9 @@ set -eo pipefail # Start the releasetool reporter -python3 -m pip install gcp-releasetool +python3 -m pip install --require-hashes -r github/python-dataflow-client/.kokoro/requirements.txt python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script -# Ensure that we have the latest versions of Twine, Wheel, and Setuptools. -python3 -m pip install --upgrade twine wheel setuptools - # Disable buffering, so that the logs stream through. 
export PYTHONUNBUFFERED=1 diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in new file mode 100644 index 0000000..7718391 --- /dev/null +++ b/.kokoro/requirements.in @@ -0,0 +1,8 @@ +gcp-docuploader +gcp-releasetool +importlib-metadata +typing-extensions +twine +wheel +setuptools +nox \ No newline at end of file diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt new file mode 100644 index 0000000..385f2d4 --- /dev/null +++ b/.kokoro/requirements.txt @@ -0,0 +1,472 @@ +# +# This file is autogenerated by pip-compile with python 3.10 +# To update, run: +# +# pip-compile --allow-unsafe --generate-hashes requirements.in +# +argcomplete==2.0.0 \ + --hash=sha256:6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20 \ + --hash=sha256:cffa11ea77999bb0dd27bb25ff6dc142a6796142f68d45b1a26b11f58724561e + # via nox +attrs==22.1.0 \ + --hash=sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6 \ + --hash=sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c + # via gcp-releasetool +bleach==5.0.1 \ + --hash=sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a \ + --hash=sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c + # via readme-renderer +cachetools==5.2.0 \ + --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ + --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db + # via google-auth +certifi==2022.6.15 \ + --hash=sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d \ + --hash=sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412 + # via requests +cffi==1.15.1 \ + --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ + --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ + --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ + 
--hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ + --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ + --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ + --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ + --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ + --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ + --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ + --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ + --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ + --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ + --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ + --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ + --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ + --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ + --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ + --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ + --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ + --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ + --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ + --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ + --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ + --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ + --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ + --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ + 
--hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ + --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ + --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ + --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ + --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ + --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ + --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ + --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ + --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ + --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ + --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ + --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ + --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ + --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ + --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ + --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ + --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ + --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ + --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ + --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ + --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ + --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ + --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ + --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ + 
--hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ + --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ + --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ + --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ + --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ + --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ + --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ + --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ + --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ + --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ + --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ + --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ + --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 + # via cryptography +charset-normalizer==2.1.1 \ + --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ + --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f + # via requests +click==8.0.4 \ + --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ + --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb + # via + # gcp-docuploader + # gcp-releasetool +colorlog==6.7.0 \ + --hash=sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662 \ + --hash=sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5 + # via + # gcp-docuploader + # nox +commonmark==0.9.1 \ + --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ + --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 + # via rich +cryptography==37.0.4 \ + 
--hash=sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59 \ + --hash=sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596 \ + --hash=sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3 \ + --hash=sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5 \ + --hash=sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab \ + --hash=sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884 \ + --hash=sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82 \ + --hash=sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b \ + --hash=sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441 \ + --hash=sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa \ + --hash=sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d \ + --hash=sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b \ + --hash=sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a \ + --hash=sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6 \ + --hash=sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157 \ + --hash=sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280 \ + --hash=sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282 \ + --hash=sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67 \ + --hash=sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8 \ + --hash=sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046 \ + --hash=sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327 \ + --hash=sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9 + # via + # gcp-releasetool + # secretstorage +distlib==0.3.6 \ + --hash=sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46 \ + 
--hash=sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e + # via virtualenv +docutils==0.19 \ + --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ + --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc + # via readme-renderer +filelock==3.8.0 \ + --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ + --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 + # via virtualenv +gcp-docuploader==0.6.3 \ + --hash=sha256:ba8c9d76b3bbac54b0311c503a373b00edc2dc02d6d54ea9507045adb8e870f7 \ + --hash=sha256:c0f5aaa82ce1854a386197e4e359b120ad6d4e57ae2c812fce42219a3288026b + # via -r requirements.in +gcp-releasetool==1.8.7 \ + --hash=sha256:3d2a67c9db39322194afb3b427e9cb0476ce8f2a04033695f0aeb63979fc2b37 \ + --hash=sha256:5e4d28f66e90780d77f3ecf1e9155852b0c3b13cbccb08ab07e66b2357c8da8d + # via -r requirements.in +google-api-core==2.8.2 \ + --hash=sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc \ + --hash=sha256:93c6a91ccac79079ac6bbf8b74ee75db970cc899278b97d53bc012f35908cf50 + # via + # google-cloud-core + # google-cloud-storage +google-auth==2.11.0 \ + --hash=sha256:be62acaae38d0049c21ca90f27a23847245c9f161ff54ede13af2cb6afecbac9 \ + --hash=sha256:ed65ecf9f681832298e29328e1ef0a3676e3732b2e56f41532d45f70a22de0fb + # via + # gcp-releasetool + # google-api-core + # google-cloud-core + # google-cloud-storage +google-cloud-core==2.3.2 \ + --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ + --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a + # via google-cloud-storage +google-cloud-storage==2.5.0 \ + --hash=sha256:19a26c66c317ce542cea0830b7e787e8dac2588b6bfa4d3fd3b871ba16305ab0 \ + --hash=sha256:382f34b91de2212e3c2e7b40ec079d27ee2e3dbbae99b75b1bcd8c63063ce235 + # via gcp-docuploader +google-crc32c==1.3.0 \ + 
--hash=sha256:04e7c220798a72fd0f08242bc8d7a05986b2a08a0573396187fd32c1dcdd58b3 \ + --hash=sha256:05340b60bf05b574159e9bd940152a47d38af3fb43803ffe71f11d704b7696a6 \ + --hash=sha256:12674a4c3b56b706153a358eaa1018c4137a5a04635b92b4652440d3d7386206 \ + --hash=sha256:127f9cc3ac41b6a859bd9dc4321097b1a4f6aa7fdf71b4f9227b9e3ebffb4422 \ + --hash=sha256:13af315c3a0eec8bb8b8d80b8b128cb3fcd17d7e4edafc39647846345a3f003a \ + --hash=sha256:1926fd8de0acb9d15ee757175ce7242e235482a783cd4ec711cc999fc103c24e \ + --hash=sha256:226f2f9b8e128a6ca6a9af9b9e8384f7b53a801907425c9a292553a3a7218ce0 \ + --hash=sha256:276de6273eb074a35bc598f8efbc00c7869c5cf2e29c90748fccc8c898c244df \ + --hash=sha256:318f73f5484b5671f0c7f5f63741ab020a599504ed81d209b5c7129ee4667407 \ + --hash=sha256:3bbce1be3687bbfebe29abdb7631b83e6b25da3f4e1856a1611eb21854b689ea \ + --hash=sha256:42ae4781333e331a1743445931b08ebdad73e188fd554259e772556fc4937c48 \ + --hash=sha256:58be56ae0529c664cc04a9c76e68bb92b091e0194d6e3c50bea7e0f266f73713 \ + --hash=sha256:5da2c81575cc3ccf05d9830f9e8d3c70954819ca9a63828210498c0774fda1a3 \ + --hash=sha256:6311853aa2bba4064d0c28ca54e7b50c4d48e3de04f6770f6c60ebda1e975267 \ + --hash=sha256:650e2917660e696041ab3dcd7abac160b4121cd9a484c08406f24c5964099829 \ + --hash=sha256:6a4db36f9721fdf391646685ecffa404eb986cbe007a3289499020daf72e88a2 \ + --hash=sha256:779cbf1ce375b96111db98fca913c1f5ec11b1d870e529b1dc7354b2681a8c3a \ + --hash=sha256:7f6fe42536d9dcd3e2ffb9d3053f5d05221ae3bbcefbe472bdf2c71c793e3183 \ + --hash=sha256:891f712ce54e0d631370e1f4997b3f182f3368179198efc30d477c75d1f44942 \ + --hash=sha256:95c68a4b9b7828ba0428f8f7e3109c5d476ca44996ed9a5f8aac6269296e2d59 \ + --hash=sha256:96a8918a78d5d64e07c8ea4ed2bc44354e3f93f46a4866a40e8db934e4c0d74b \ + --hash=sha256:9c3cf890c3c0ecfe1510a452a165431b5831e24160c5fcf2071f0f85ca5a47cd \ + --hash=sha256:9f58099ad7affc0754ae42e6d87443299f15d739b0ce03c76f515153a5cda06c \ + --hash=sha256:a0b9e622c3b2b8d0ce32f77eba617ab0d6768b82836391e4f8f9e2074582bf02 \ + 
--hash=sha256:a7f9cbea4245ee36190f85fe1814e2d7b1e5f2186381b082f5d59f99b7f11328 \ + --hash=sha256:bab4aebd525218bab4ee615786c4581952eadc16b1ff031813a2fd51f0cc7b08 \ + --hash=sha256:c124b8c8779bf2d35d9b721e52d4adb41c9bfbde45e6a3f25f0820caa9aba73f \ + --hash=sha256:c9da0a39b53d2fab3e5467329ed50e951eb91386e9d0d5b12daf593973c3b168 \ + --hash=sha256:ca60076c388728d3b6ac3846842474f4250c91efbfe5afa872d3ffd69dd4b318 \ + --hash=sha256:cb6994fff247987c66a8a4e550ef374671c2b82e3c0d2115e689d21e511a652d \ + --hash=sha256:d1c1d6236feab51200272d79b3d3e0f12cf2cbb12b208c835b175a21efdb0a73 \ + --hash=sha256:dd7760a88a8d3d705ff562aa93f8445ead54f58fd482e4f9e2bafb7e177375d4 \ + --hash=sha256:dda4d8a3bb0b50f540f6ff4b6033f3a74e8bf0bd5320b70fab2c03e512a62812 \ + --hash=sha256:e0f1ff55dde0ebcfbef027edc21f71c205845585fffe30d4ec4979416613e9b3 \ + --hash=sha256:e7a539b9be7b9c00f11ef16b55486141bc2cdb0c54762f84e3c6fc091917436d \ + --hash=sha256:eb0b14523758e37802f27b7f8cd973f5f3d33be7613952c0df904b68c4842f0e \ + --hash=sha256:ed447680ff21c14aaceb6a9f99a5f639f583ccfe4ce1a5e1d48eb41c3d6b3217 \ + --hash=sha256:f52a4ad2568314ee713715b1e2d79ab55fab11e8b304fd1462ff5cccf4264b3e \ + --hash=sha256:fbd60c6aaa07c31d7754edbc2334aef50601b7f1ada67a96eb1eb57c7c72378f \ + --hash=sha256:fc28e0db232c62ca0c3600884933178f0825c99be4474cdd645e378a10588125 \ + --hash=sha256:fe31de3002e7b08eb20823b3735b97c86c5926dd0581c7710a680b418a8709d4 \ + --hash=sha256:fec221a051150eeddfdfcff162e6db92c65ecf46cb0f7bb1bf812a1520ec026b \ + --hash=sha256:ff71073ebf0e42258a42a0b34f2c09ec384977e7f6808999102eedd5b49920e3 + # via google-resumable-media +google-resumable-media==2.3.3 \ + --hash=sha256:27c52620bd364d1c8116eaac4ea2afcbfb81ae9139fb3199652fcac1724bfb6c \ + --hash=sha256:5b52774ea7a829a8cdaa8bd2d4c3d4bc660c91b30857ab2668d0eb830f4ea8c5 + # via google-cloud-storage +googleapis-common-protos==1.56.4 \ + --hash=sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394 \ + 
--hash=sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417 + # via google-api-core +idna==3.3 \ + --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \ + --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d + # via requests +importlib-metadata==4.12.0 \ + --hash=sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670 \ + --hash=sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23 + # via + # -r requirements.in + # twine +jaraco-classes==3.2.2 \ + --hash=sha256:6745f113b0b588239ceb49532aa09c3ebb947433ce311ef2f8e3ad64ebb74594 \ + --hash=sha256:e6ef6fd3fcf4579a7a019d87d1e56a883f4e4c35cfe925f86731abc58804e647 + # via keyring +jeepney==0.8.0 \ + --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ + --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 + # via + # keyring + # secretstorage +jinja2==3.1.2 \ + --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ + --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 + # via gcp-releasetool +keyring==23.9.0 \ + --hash=sha256:4c32a31174faaee48f43a7e2c7e9c3216ec5e95acf22a2bebfb4a1d05056ee44 \ + --hash=sha256:98f060ec95ada2ab910c195a2d4317be6ef87936a766b239c46aa3c7aac4f0db + # via + # gcp-releasetool + # twine +markupsafe==2.1.1 \ + --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \ + --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \ + --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \ + --hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \ + --hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \ + --hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \ + --hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \ + 
--hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \ + --hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \ + --hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \ + --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 \ + --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \ + --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \ + --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \ + --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \ + --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \ + --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \ + --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b \ + --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \ + --hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \ + --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \ + --hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \ + --hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \ + --hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \ + --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \ + --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \ + --hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \ + --hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \ + --hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \ + --hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \ + --hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \ + 
--hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \ + --hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \ + --hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \ + --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \ + --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \ + --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \ + --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \ + --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ + --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 + # via jinja2 +more-itertools==8.14.0 \ + --hash=sha256:1bc4f91ee5b1b31ac7ceacc17c09befe6a40a503907baf9c839c229b5095cfd2 \ + --hash=sha256:c09443cd3d5438b8dafccd867a6bc1cb0894389e90cb53d227456b0b0bccb750 + # via jaraco-classes +nox==2022.8.7 \ + --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ + --hash=sha256:96cca88779e08282a699d672258ec01eb7c792d35bbbf538c723172bce23212c + # via -r requirements.in +packaging==21.3 \ + --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ + --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 + # via + # gcp-releasetool + # nox +pkginfo==1.8.3 \ + --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ + --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c + # via twine +platformdirs==2.5.2 \ + --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ + --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 + # via virtualenv +protobuf==3.20.1 \ + --hash=sha256:06059eb6953ff01e56a25cd02cca1a9649a75a7e65397b5b9b4e929ed71d10cf \ + --hash=sha256:097c5d8a9808302fb0da7e20edf0b8d4703274d140fd25c5edabddcde43e081f \ + 
--hash=sha256:284f86a6207c897542d7e956eb243a36bb8f9564c1742b253462386e96c6b78f \ + --hash=sha256:32ca378605b41fd180dfe4e14d3226386d8d1b002ab31c969c366549e66a2bb7 \ + --hash=sha256:3cc797c9d15d7689ed507b165cd05913acb992d78b379f6014e013f9ecb20996 \ + --hash=sha256:62f1b5c4cd6c5402b4e2d63804ba49a327e0c386c99b1675c8a0fefda23b2067 \ + --hash=sha256:69ccfdf3657ba59569c64295b7d51325f91af586f8d5793b734260dfe2e94e2c \ + --hash=sha256:6f50601512a3d23625d8a85b1638d914a0970f17920ff39cec63aaef80a93fb7 \ + --hash=sha256:7403941f6d0992d40161aa8bb23e12575637008a5a02283a930addc0508982f9 \ + --hash=sha256:755f3aee41354ae395e104d62119cb223339a8f3276a0cd009ffabfcdd46bb0c \ + --hash=sha256:77053d28427a29987ca9caf7b72ccafee011257561259faba8dd308fda9a8739 \ + --hash=sha256:7e371f10abe57cee5021797126c93479f59fccc9693dafd6bd5633ab67808a91 \ + --hash=sha256:9016d01c91e8e625141d24ec1b20fed584703e527d28512aa8c8707f105a683c \ + --hash=sha256:9be73ad47579abc26c12024239d3540e6b765182a91dbc88e23658ab71767153 \ + --hash=sha256:adc31566d027f45efe3f44eeb5b1f329da43891634d61c75a5944e9be6dd42c9 \ + --hash=sha256:adfc6cf69c7f8c50fd24c793964eef18f0ac321315439d94945820612849c388 \ + --hash=sha256:af0ebadc74e281a517141daad9d0f2c5d93ab78e9d455113719a45a49da9db4e \ + --hash=sha256:cb29edb9eab15742d791e1025dd7b6a8f6fcb53802ad2f6e3adcb102051063ab \ + --hash=sha256:cd68be2559e2a3b84f517fb029ee611546f7812b1fdd0aa2ecc9bc6ec0e4fdde \ + --hash=sha256:cdee09140e1cd184ba9324ec1df410e7147242b94b5f8b0c64fc89e38a8ba531 \ + --hash=sha256:db977c4ca738dd9ce508557d4fce0f5aebd105e158c725beec86feb1f6bc20d8 \ + --hash=sha256:dd5789b2948ca702c17027c84c2accb552fc30f4622a98ab5c51fcfe8c50d3e7 \ + --hash=sha256:e250a42f15bf9d5b09fe1b293bdba2801cd520a9f5ea2d7fb7536d4441811d20 \ + --hash=sha256:ff8d8fa42675249bb456f5db06c00de6c2f4c27a065955917b28c4f15978b9c3 + # via + # gcp-docuploader + # gcp-releasetool + # google-api-core +py==1.11.0 \ + --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ + 
--hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 + # via nox +pyasn1==0.4.8 \ + --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ + --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.2.8 \ + --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \ + --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74 + # via google-auth +pycparser==2.21 \ + --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ + --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 + # via cffi +pygments==2.13.0 \ + --hash=sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1 \ + --hash=sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42 + # via + # readme-renderer + # rich +pyjwt==2.4.0 \ + --hash=sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf \ + --hash=sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba + # via gcp-releasetool +pyparsing==3.0.9 \ + --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ + --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc + # via packaging +pyperclip==1.8.2 \ + --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 + # via gcp-releasetool +python-dateutil==2.8.2 \ + --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ + --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 + # via gcp-releasetool +readme-renderer==37.0 \ + --hash=sha256:07b7ea234e03e58f77cc222e206e6abb8f4c0435becce5104794ee591f9301c5 \ + --hash=sha256:9fa416704703e509eeb900696751c908ddeb2011319d93700d8f18baff887a69 + # via twine +requests==2.28.1 \ + --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ + 
--hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349 + # via + # gcp-releasetool + # google-api-core + # google-cloud-storage + # requests-toolbelt + # twine +requests-toolbelt==0.9.1 \ + --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \ + --hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0 + # via twine +rfc3986==2.0.0 \ + --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ + --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c + # via twine +rich==12.5.1 \ + --hash=sha256:2eb4e6894cde1e017976d2975ac210ef515d7548bc595ba20e195fb9628acdeb \ + --hash=sha256:63a5c5ce3673d3d5fbbf23cd87e11ab84b6b451436f1b7f19ec54b6bc36ed7ca + # via twine +rsa==4.9 \ + --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ + --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 + # via google-auth +secretstorage==3.3.3 \ + --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ + --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 + # via keyring +six==1.16.0 \ + --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ + --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 + # via + # bleach + # gcp-docuploader + # google-auth + # python-dateutil +twine==4.0.1 \ + --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ + --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 + # via -r requirements.in +typing-extensions==4.3.0 \ + --hash=sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02 \ + --hash=sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6 + # via -r requirements.in +urllib3==1.26.12 \ + --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ + 
--hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997 + # via + # requests + # twine +virtualenv==20.16.4 \ + --hash=sha256:014f766e4134d0008dcaa1f95bafa0fb0f575795d07cae50b1bee514185d6782 \ + --hash=sha256:035ed57acce4ac35c82c9d8802202b0e71adac011a511ff650cbcf9635006a22 + # via nox +webencodings==0.5.1 \ + --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ + --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 + # via bleach +wheel==0.37.1 \ + --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ + --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 + # via -r requirements.in +zipp==3.8.1 \ + --hash=sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2 \ + --hash=sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +setuptools==65.2.0 \ + --hash=sha256:7f4bc85450898a09f76ebf28b72fa25bc7111f6c7d665d514a60bba9c75ef2a9 \ + --hash=sha256:a3ca5857c89f82f5c9410e8508cb32f4872a3bafd4aa7ae122a24ca33bccc750 + # via -r requirements.in diff --git a/CHANGELOG.md b/CHANGELOG.md index ff801dc..ec2cce9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.6.0](https://github.com/googleapis/python-dataflow-client/compare/v0.5.5...v0.6.0) (2022-09-13) + + +### Features + +* Enable REST transport support ([#139](https://github.com/googleapis/python-dataflow-client/issues/139)) ([e8a64ff](https://github.com/googleapis/python-dataflow-client/commit/e8a64ff142ae8f0ff48736b1611a11740e3fa9a3)) + ## [0.5.5](https://github.com/googleapis/python-dataflow-client/compare/v0.5.4...v0.5.5) (2022-08-11) diff --git a/google/cloud/dataflow_v1beta3/gapic_metadata.json b/google/cloud/dataflow_v1beta3/gapic_metadata.json index 8d17272..ab8a5b6 100644 --- 
a/google/cloud/dataflow_v1beta3/gapic_metadata.json +++ b/google/cloud/dataflow_v1beta3/gapic_metadata.json @@ -26,6 +26,16 @@ ] } } + }, + "rest": { + "libraryClient": "FlexTemplatesServiceClient", + "rpcs": { + "LaunchFlexTemplate": { + "methods": [ + "launch_flex_template" + ] + } + } } } }, @@ -110,6 +120,46 @@ ] } } + }, + "rest": { + "libraryClient": "JobsV1Beta3Client", + "rpcs": { + "AggregatedListJobs": { + "methods": [ + "aggregated_list_jobs" + ] + }, + "CheckActiveJobs": { + "methods": [ + "check_active_jobs" + ] + }, + "CreateJob": { + "methods": [ + "create_job" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + }, + "SnapshotJob": { + "methods": [ + "snapshot_job" + ] + }, + "UpdateJob": { + "methods": [ + "update_job" + ] + } + } } } }, @@ -134,6 +184,16 @@ ] } } + }, + "rest": { + "libraryClient": "MessagesV1Beta3Client", + "rpcs": { + "ListJobMessages": { + "methods": [ + "list_job_messages" + ] + } + } } } }, @@ -178,6 +238,26 @@ ] } } + }, + "rest": { + "libraryClient": "MetricsV1Beta3Client", + "rpcs": { + "GetJobExecutionDetails": { + "methods": [ + "get_job_execution_details" + ] + }, + "GetJobMetrics": { + "methods": [ + "get_job_metrics" + ] + }, + "GetStageExecutionDetails": { + "methods": [ + "get_stage_execution_details" + ] + } + } } } }, @@ -222,6 +302,26 @@ ] } } + }, + "rest": { + "libraryClient": "SnapshotsV1Beta3Client", + "rpcs": { + "DeleteSnapshot": { + "methods": [ + "delete_snapshot" + ] + }, + "GetSnapshot": { + "methods": [ + "get_snapshot" + ] + }, + "ListSnapshots": { + "methods": [ + "list_snapshots" + ] + } + } } } }, @@ -266,6 +366,26 @@ ] } } + }, + "rest": { + "libraryClient": "TemplatesServiceClient", + "rpcs": { + "CreateJobFromTemplate": { + "methods": [ + "create_job_from_template" + ] + }, + "GetTemplate": { + "methods": [ + "get_template" + ] + }, + "LaunchTemplate": { + "methods": [ + "launch_template" + ] + } + } } } } diff --git 
a/google/cloud/dataflow_v1beta3/services/flex_templates_service/async_client.py b/google/cloud/dataflow_v1beta3/services/flex_templates_service/async_client.py index 9405f05..a7a4f46 100644 --- a/google/cloud/dataflow_v1beta3/services/flex_templates_service/async_client.py +++ b/google/cloud/dataflow_v1beta3/services/flex_templates_service/async_client.py @@ -215,6 +215,13 @@ async def launch_flex_template( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 async def sample_launch_flex_template(): diff --git a/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py b/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py index e91d759..a443b47 100644 --- a/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py +++ b/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py @@ -39,6 +39,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, FlexTemplatesServiceTransport from .transports.grpc import FlexTemplatesServiceGrpcTransport from .transports.grpc_asyncio import FlexTemplatesServiceGrpcAsyncIOTransport +from .transports.rest import FlexTemplatesServiceRestTransport class FlexTemplatesServiceClientMeta(type): @@ -54,6 +55,7 @@ class FlexTemplatesServiceClientMeta(type): ) # type: Dict[str, Type[FlexTemplatesServiceTransport]] _transport_registry["grpc"] = FlexTemplatesServiceGrpcTransport _transport_registry["grpc_asyncio"] = FlexTemplatesServiceGrpcAsyncIOTransport + _transport_registry["rest"] = FlexTemplatesServiceRestTransport def get_transport_class( cls, @@ -327,6 +329,9 @@ def __init__( 
transport (Union[str, FlexTemplatesServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the @@ -419,6 +424,13 @@ def launch_flex_template( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 def sample_launch_flex_template(): diff --git a/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/__init__.py b/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/__init__.py index 1568f78..ff6c45e 100644 --- a/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/__init__.py +++ b/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/__init__.py @@ -19,6 +19,7 @@ from .base import FlexTemplatesServiceTransport from .grpc import FlexTemplatesServiceGrpcTransport from .grpc_asyncio import FlexTemplatesServiceGrpcAsyncIOTransport +from .rest import FlexTemplatesServiceRestInterceptor, FlexTemplatesServiceRestTransport # Compile a registry of transports. 
_transport_registry = ( @@ -26,9 +27,12 @@ ) # type: Dict[str, Type[FlexTemplatesServiceTransport]] _transport_registry["grpc"] = FlexTemplatesServiceGrpcTransport _transport_registry["grpc_asyncio"] = FlexTemplatesServiceGrpcAsyncIOTransport +_transport_registry["rest"] = FlexTemplatesServiceRestTransport __all__ = ( "FlexTemplatesServiceTransport", "FlexTemplatesServiceGrpcTransport", "FlexTemplatesServiceGrpcAsyncIOTransport", + "FlexTemplatesServiceRestTransport", + "FlexTemplatesServiceRestInterceptor", ) diff --git a/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/rest.py b/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/rest.py new file mode 100644 index 0000000..1986e26 --- /dev/null +++ b/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/rest.py @@ -0,0 +1,313 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.dataflow_v1beta3.types import templates + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import FlexTemplatesServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class FlexTemplatesServiceRestInterceptor: + """Interceptor for FlexTemplatesService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the FlexTemplatesServiceRestTransport. + + .. 
code-block:: python + class MyCustomFlexTemplatesServiceInterceptor(FlexTemplatesServiceRestInterceptor): + def pre_launch_flex_template(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_launch_flex_template(response): + logging.log(f"Received response: {response}") + + transport = FlexTemplatesServiceRestTransport(interceptor=MyCustomFlexTemplatesServiceInterceptor()) + client = FlexTemplatesServiceClient(transport=transport) + + + """ + + def pre_launch_flex_template( + self, + request: templates.LaunchFlexTemplateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[templates.LaunchFlexTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for launch_flex_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the FlexTemplatesService server. + """ + return request, metadata + + def post_launch_flex_template( + self, response: templates.LaunchFlexTemplateResponse + ) -> templates.LaunchFlexTemplateResponse: + """Post-rpc interceptor for launch_flex_template + + Override in a subclass to manipulate the response + after it is returned by the FlexTemplatesService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class FlexTemplatesServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: FlexTemplatesServiceRestInterceptor + + +class FlexTemplatesServiceRestTransport(FlexTemplatesServiceTransport): + """REST backend transport for FlexTemplatesService. + + Provides a service for Flex templates. This feature is not + ready yet. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). 
We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__( + self, + *, + host: str = "dataflow.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[FlexTemplatesServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or FlexTemplatesServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _LaunchFlexTemplate(FlexTemplatesServiceRestStub): + def __hash__(self): + return hash("LaunchFlexTemplate") + + def __call__( + self, + request: templates.LaunchFlexTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> templates.LaunchFlexTemplateResponse: + r"""Call the launch flex template method over HTTP. + + Args: + request (~.templates.LaunchFlexTemplateRequest): + The request object. A request to launch a Cloud Dataflow + job from a FlexTemplate. 
+ + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.templates.LaunchFlexTemplateResponse: + Response to the request to launch a + job from Flex Template. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1b3/projects/{project_id}/locations/{location}/flexTemplates:launch", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_launch_flex_template( + request, metadata + ) + pb_request = templates.LaunchFlexTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = templates.LaunchFlexTemplateResponse() + pb_resp = templates.LaunchFlexTemplateResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_launch_flex_template(resp) + return resp + + @property + def launch_flex_template( + self, + ) -> Callable[ + [templates.LaunchFlexTemplateRequest], templates.LaunchFlexTemplateResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._LaunchFlexTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("FlexTemplatesServiceRestTransport",) diff --git a/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/async_client.py b/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/async_client.py index cdcd4fc..19973e7 100644 --- a/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/async_client.py +++ b/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/async_client.py @@ -221,6 +221,13 @@ async def create_job( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 async def sample_create_job(): @@ -304,6 +311,13 @@ async def get_job( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 async def sample_get_job(): @@ -389,6 +403,13 @@ async def update_job( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 async def sample_update_job(): @@ -475,6 +496,13 @@ async def list_jobs( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 async def sample_list_jobs(): @@ -571,6 +599,13 @@ async def aggregated_list_jobs( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 async def sample_aggregated_list_jobs(): @@ -665,6 +700,13 @@ async def check_active_jobs( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 async def sample_check_active_jobs(): @@ -729,6 +771,13 @@ async def snapshot_job( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 async def sample_snapshot_job(): diff --git a/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py b/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py index 4f334f9..d216313 100644 --- a/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py +++ b/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py @@ -43,6 +43,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, JobsV1Beta3Transport from .transports.grpc import JobsV1Beta3GrpcTransport from .transports.grpc_asyncio import JobsV1Beta3GrpcAsyncIOTransport +from .transports.rest import JobsV1Beta3RestTransport class JobsV1Beta3ClientMeta(type): @@ -56,6 +57,7 @@ class JobsV1Beta3ClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[JobsV1Beta3Transport]] _transport_registry["grpc"] = JobsV1Beta3GrpcTransport _transport_registry["grpc_asyncio"] = JobsV1Beta3GrpcAsyncIOTransport + _transport_registry["rest"] = JobsV1Beta3RestTransport def get_transport_class( cls, @@ -330,6 +332,9 @@ def __init__( transport (Union[str, JobsV1Beta3Transport]): The transport to use. If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the @@ -428,6 +433,13 @@ def create_job( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 def sample_create_job(): @@ -512,6 +524,13 @@ def get_job( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 def sample_get_job(): @@ -598,6 +617,13 @@ def update_job( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 def sample_update_job(): @@ -685,6 +711,13 @@ def list_jobs( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 def sample_list_jobs(): @@ -782,6 +815,13 @@ def aggregated_list_jobs( .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 def sample_aggregated_list_jobs(): @@ -877,6 +917,13 @@ def check_active_jobs( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 def sample_check_active_jobs(): @@ -942,6 +989,13 @@ def snapshot_job( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 def sample_snapshot_job(): diff --git a/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/__init__.py b/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/__init__.py index 75f38da..92553cc 100644 --- a/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/__init__.py +++ b/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/__init__.py @@ -19,14 +19,18 @@ from .base import JobsV1Beta3Transport from .grpc import JobsV1Beta3GrpcTransport from .grpc_asyncio import JobsV1Beta3GrpcAsyncIOTransport +from .rest import JobsV1Beta3RestInterceptor, JobsV1Beta3RestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[JobsV1Beta3Transport]] _transport_registry["grpc"] = JobsV1Beta3GrpcTransport _transport_registry["grpc_asyncio"] = JobsV1Beta3GrpcAsyncIOTransport +_transport_registry["rest"] = JobsV1Beta3RestTransport __all__ = ( "JobsV1Beta3Transport", "JobsV1Beta3GrpcTransport", "JobsV1Beta3GrpcAsyncIOTransport", + "JobsV1Beta3RestTransport", + "JobsV1Beta3RestInterceptor", ) diff --git a/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/rest.py b/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/rest.py new file mode 100644 index 0000000..1e8598a --- /dev/null +++ b/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/rest.py @@ -0,0 +1,933 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.dataflow_v1beta3.types import jobs, snapshots + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import JobsV1Beta3Transport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class JobsV1Beta3RestInterceptor: + """Interceptor for JobsV1Beta3. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the JobsV1Beta3RestTransport. + + .. code-block:: python + class MyCustomJobsV1Beta3Interceptor(JobsV1Beta3RestInterceptor): + def pre_aggregated_list_jobs(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list_jobs(response): + logging.log(f"Received response: {response}") + + def pre_check_active_jobs(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_check_active_jobs(response): + logging.log(f"Received response: {response}") + + def pre_create_job(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_job(response): + logging.log(f"Received response: {response}") + + def pre_get_job(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_job(response): + logging.log(f"Received response: {response}") + + def pre_list_jobs(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_jobs(response): + logging.log(f"Received response: {response}") + + def pre_snapshot_job(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_snapshot_job(response): + logging.log(f"Received response: {response}") + + def pre_update_job(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_job(response): + logging.log(f"Received response: {response}") + + transport = JobsV1Beta3RestTransport(interceptor=MyCustomJobsV1Beta3Interceptor()) + client = JobsV1Beta3Client(transport=transport) + + + """ + + def pre_aggregated_list_jobs( + 
self, request: jobs.ListJobsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[jobs.ListJobsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list_jobs + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobsV1Beta3 server. + """ + return request, metadata + + def post_aggregated_list_jobs( + self, response: jobs.ListJobsResponse + ) -> jobs.ListJobsResponse: + """Post-rpc interceptor for aggregated_list_jobs + + Override in a subclass to manipulate the response + after it is returned by the JobsV1Beta3 server but before + it is returned to user code. + """ + return response + + def pre_create_job( + self, request: jobs.CreateJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[jobs.CreateJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobsV1Beta3 server. + """ + return request, metadata + + def post_create_job(self, response: jobs.Job) -> jobs.Job: + """Post-rpc interceptor for create_job + + Override in a subclass to manipulate the response + after it is returned by the JobsV1Beta3 server but before + it is returned to user code. + """ + return response + + def pre_get_job( + self, request: jobs.GetJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[jobs.GetJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobsV1Beta3 server. + """ + return request, metadata + + def post_get_job(self, response: jobs.Job) -> jobs.Job: + """Post-rpc interceptor for get_job + + Override in a subclass to manipulate the response + after it is returned by the JobsV1Beta3 server but before + it is returned to user code. 
+ """ + return response + + def pre_list_jobs( + self, request: jobs.ListJobsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[jobs.ListJobsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_jobs + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobsV1Beta3 server. + """ + return request, metadata + + def post_list_jobs(self, response: jobs.ListJobsResponse) -> jobs.ListJobsResponse: + """Post-rpc interceptor for list_jobs + + Override in a subclass to manipulate the response + after it is returned by the JobsV1Beta3 server but before + it is returned to user code. + """ + return response + + def pre_snapshot_job( + self, request: jobs.SnapshotJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[jobs.SnapshotJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for snapshot_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobsV1Beta3 server. + """ + return request, metadata + + def post_snapshot_job(self, response: snapshots.Snapshot) -> snapshots.Snapshot: + """Post-rpc interceptor for snapshot_job + + Override in a subclass to manipulate the response + after it is returned by the JobsV1Beta3 server but before + it is returned to user code. + """ + return response + + def pre_update_job( + self, request: jobs.UpdateJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[jobs.UpdateJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobsV1Beta3 server. + """ + return request, metadata + + def post_update_job(self, response: jobs.Job) -> jobs.Job: + """Post-rpc interceptor for update_job + + Override in a subclass to manipulate the response + after it is returned by the JobsV1Beta3 server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class JobsV1Beta3RestStub: + _session: AuthorizedSession + _host: str + _interceptor: JobsV1Beta3RestInterceptor + + +class JobsV1Beta3RestTransport(JobsV1Beta3Transport): + """REST backend transport for JobsV1Beta3. + + Provides a method to create and modify Google Cloud Dataflow + jobs. A Job is a multi-stage computation graph run by the Cloud + Dataflow service. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__( + self, + *, + host: str = "dataflow.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[JobsV1Beta3RestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or JobsV1Beta3RestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _AggregatedListJobs(JobsV1Beta3RestStub):
+        def __hash__(self):
+            return hash("AggregatedListJobs")
+
+        def __call__(
+            self,
+            request: jobs.ListJobsRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> jobs.ListJobsResponse:
+            r"""Call the aggregated list jobs method over HTTP.
+
+            Args:
+                request (~.jobs.ListJobsRequest):
+                    The request object. Request to list Cloud Dataflow jobs.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.jobs.ListJobsResponse:
+                    Response to a request to list Cloud
+                Dataflow jobs in a project. This might
+                be a partial response, depending on the
+                page size in the ListJobsRequest.
+                However, if the project does not have
+                any jobs, an instance of
+                ListJobsResponse is not returned and the
+                requests's response body is empty {}.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/jobs:aggregated", + }, + ] + request, metadata = self._interceptor.pre_aggregated_list_jobs( + request, metadata + ) + pb_request = jobs.ListJobsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = jobs.ListJobsResponse() + pb_resp = jobs.ListJobsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list_jobs(resp) + return resp + + class _CheckActiveJobs(JobsV1Beta3RestStub): + def __hash__(self): + return hash("CheckActiveJobs") + + def __call__( + self, + request: jobs.CheckActiveJobsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.CheckActiveJobsResponse: + raise RuntimeError( + "Cannot define a method without a valid 'google.api.http' annotation." 
+ ) + + class _CreateJob(JobsV1Beta3RestStub): + def __hash__(self): + return hash("CreateJob") + + def __call__( + self, + request: jobs.CreateJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Call the create job method over HTTP. + + Args: + request (~.jobs.CreateJobRequest): + The request object. Request to create a Cloud Dataflow + job. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.jobs.Job: + Defines a job to be run by the Cloud + Dataflow service. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1b3/projects/{project_id}/locations/{location}/jobs", + "body": "job", + }, + { + "method": "post", + "uri": "/v1b3/projects/{project_id}/jobs", + "body": "job", + }, + ] + request, metadata = self._interceptor.pre_create_job(request, metadata) + pb_request = jobs.CreateJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, 
strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = jobs.Job() + pb_resp = jobs.Job.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_job(resp) + return resp + + class _GetJob(JobsV1Beta3RestStub): + def __hash__(self): + return hash("GetJob") + + def __call__( + self, + request: jobs.GetJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Call the get job method over HTTP. + + Args: + request (~.jobs.GetJobRequest): + The request object. Request to get the state of a Cloud + Dataflow job. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.jobs.Job: + Defines a job to be run by the Cloud + Dataflow service. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}", + }, + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/jobs/{job_id}", + }, + ] + request, metadata = self._interceptor.pre_get_job(request, metadata) + pb_request = jobs.GetJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = jobs.Job() + pb_resp = jobs.Job.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_job(resp) + return resp + + class _ListJobs(JobsV1Beta3RestStub): + def __hash__(self): + return hash("ListJobs") + + def __call__( + self, + request: jobs.ListJobsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.ListJobsResponse: + r"""Call the list jobs method over HTTP. + + Args: + request (~.jobs.ListJobsRequest): + The request object. Request to list Cloud Dataflow jobs. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.jobs.ListJobsResponse: + Response to a request to list Cloud + Dataflow jobs in a project. This might + be a partial response, depending on the + page size in the ListJobsRequest. + However, if the project does not have + any jobs, an instance of + ListJobsResponse is not returned and the + requests's response body is empty {}. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/locations/{location}/jobs", + }, + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/jobs", + }, + ] + request, metadata = self._interceptor.pre_list_jobs(request, metadata) + pb_request = jobs.ListJobsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = jobs.ListJobsResponse() + pb_resp = jobs.ListJobsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_jobs(resp) + return resp + + class _SnapshotJob(JobsV1Beta3RestStub): + def __hash__(self): + return hash("SnapshotJob") + + def __call__( + self, + request: jobs.SnapshotJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> snapshots.Snapshot: + r"""Call the snapshot job method over HTTP. + + Args: + request (~.jobs.SnapshotJobRequest): + The request object. Request to create a snapshot of a + job. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.snapshots.Snapshot: + Represents a snapshot of a job. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}:snapshot", + "body": "*", + }, + { + "method": "post", + "uri": "/v1b3/projects/{project_id}/jobs/{job_id}:snapshot", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_snapshot_job(request, metadata) + pb_request = jobs.SnapshotJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = snapshots.Snapshot() + pb_resp = snapshots.Snapshot.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_snapshot_job(resp) + return resp + + class _UpdateJob(JobsV1Beta3RestStub): + def __hash__(self): + return hash("UpdateJob") + + def __call__( + self, + request: jobs.UpdateJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Call the update job method over HTTP. + + Args: + request (~.jobs.UpdateJobRequest): + The request object. Request to update a Cloud Dataflow + job. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.jobs.Job: + Defines a job to be run by the Cloud + Dataflow service. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "put", + "uri": "/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}", + "body": "job", + }, + { + "method": "put", + "uri": "/v1b3/projects/{project_id}/jobs/{job_id}", + "body": "job", + }, + ] + request, metadata = self._interceptor.pre_update_job(request, metadata) + pb_request = jobs.UpdateJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = jobs.Job() + pb_resp = jobs.Job.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_job(resp) + return resp + + @property + def aggregated_list_jobs( + self, + ) -> Callable[[jobs.ListJobsRequest], jobs.ListJobsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AggregatedListJobs(self._session, self._host, self._interceptor) # type: ignore + + @property + def check_active_jobs( + self, + ) -> Callable[[jobs.CheckActiveJobsRequest], jobs.CheckActiveJobsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CheckActiveJobs(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_job(self) -> Callable[[jobs.CreateJobRequest], jobs.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_job(self) -> Callable[[jobs.GetJobRequest], jobs.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_jobs(self) -> Callable[[jobs.ListJobsRequest], jobs.ListJobsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListJobs(self._session, self._host, self._interceptor) # type: ignore + + @property + def snapshot_job(self) -> Callable[[jobs.SnapshotJobRequest], snapshots.Snapshot]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SnapshotJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_job(self) -> Callable[[jobs.UpdateJobRequest], jobs.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("JobsV1Beta3RestTransport",) diff --git a/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/async_client.py b/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/async_client.py index 642a946..9709399 100644 --- a/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/async_client.py +++ b/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/async_client.py @@ -223,6 +223,13 @@ async def list_job_messages( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 async def sample_list_job_messages(): diff --git a/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py b/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py index 4c80c85..435a92f 100644 --- a/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py +++ b/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py @@ -40,6 +40,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, MessagesV1Beta3Transport from .transports.grpc import MessagesV1Beta3GrpcTransport from .transports.grpc_asyncio import MessagesV1Beta3GrpcAsyncIOTransport +from .transports.rest import MessagesV1Beta3RestTransport class MessagesV1Beta3ClientMeta(type): @@ -55,6 +56,7 @@ class MessagesV1Beta3ClientMeta(type): ) # type: Dict[str, Type[MessagesV1Beta3Transport]] _transport_registry["grpc"] = MessagesV1Beta3GrpcTransport 
_transport_registry["grpc_asyncio"] = MessagesV1Beta3GrpcAsyncIOTransport + _transport_registry["rest"] = MessagesV1Beta3RestTransport def get_transport_class( cls, @@ -328,6 +330,9 @@ def __init__( transport (Union[str, MessagesV1Beta3Transport]): The transport to use. If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the @@ -428,6 +433,13 @@ def list_job_messages( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 def sample_list_job_messages(): diff --git a/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/__init__.py b/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/__init__.py index 401cb80..67894a0 100644 --- a/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/__init__.py +++ b/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/__init__.py @@ -19,14 +19,18 @@ from .base import MessagesV1Beta3Transport from .grpc import MessagesV1Beta3GrpcTransport from .grpc_asyncio import MessagesV1Beta3GrpcAsyncIOTransport +from .rest import MessagesV1Beta3RestInterceptor, MessagesV1Beta3RestTransport # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[MessagesV1Beta3Transport]] _transport_registry["grpc"] = MessagesV1Beta3GrpcTransport _transport_registry["grpc_asyncio"] = MessagesV1Beta3GrpcAsyncIOTransport +_transport_registry["rest"] = MessagesV1Beta3RestTransport __all__ = ( "MessagesV1Beta3Transport", "MessagesV1Beta3GrpcTransport", "MessagesV1Beta3GrpcAsyncIOTransport", + "MessagesV1Beta3RestTransport", + "MessagesV1Beta3RestInterceptor", ) diff --git a/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/rest.py b/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/rest.py new file mode 100644 index 0000000..c08b7db --- /dev/null +++ b/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/rest.py @@ -0,0 +1,309 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.dataflow_v1beta3.types import messages + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import MessagesV1Beta3Transport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class MessagesV1Beta3RestInterceptor: + """Interceptor for MessagesV1Beta3. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the MessagesV1Beta3RestTransport. + + .. 
code-block:: python + class MyCustomMessagesV1Beta3Interceptor(MessagesV1Beta3RestInterceptor): + def pre_list_job_messages(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_job_messages(response): + logging.log(f"Received response: {response}") + + transport = MessagesV1Beta3RestTransport(interceptor=MyCustomMessagesV1Beta3Interceptor()) + client = MessagesV1Beta3Client(transport=transport) + + + """ + + def pre_list_job_messages( + self, + request: messages.ListJobMessagesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[messages.ListJobMessagesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_job_messages + + Override in a subclass to manipulate the request or metadata + before they are sent to the MessagesV1Beta3 server. + """ + return request, metadata + + def post_list_job_messages( + self, response: messages.ListJobMessagesResponse + ) -> messages.ListJobMessagesResponse: + """Post-rpc interceptor for list_job_messages + + Override in a subclass to manipulate the response + after it is returned by the MessagesV1Beta3 server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class MessagesV1Beta3RestStub: + _session: AuthorizedSession + _host: str + _interceptor: MessagesV1Beta3RestInterceptor + + +class MessagesV1Beta3RestTransport(MessagesV1Beta3Transport): + """REST backend transport for MessagesV1Beta3. + + The Dataflow Messages API is used for monitoring the progress + of Dataflow jobs. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! 
+ """ + + def __init__( + self, + *, + host: str = "dataflow.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[MessagesV1Beta3RestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or MessagesV1Beta3RestInterceptor() + self._prep_wrapped_messages(client_info) + + class _ListJobMessages(MessagesV1Beta3RestStub): + def __hash__(self): + return hash("ListJobMessages") + + def __call__( + self, + request: messages.ListJobMessagesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> messages.ListJobMessagesResponse: + r"""Call the list job messages method over HTTP. + + Args: + request (~.messages.ListJobMessagesRequest): + The request object. Request to list job messages. Up to max_results messages + will be returned in the time range specified starting + with the oldest messages first. If no time range is + specified the results will start with the oldest + message. 
+ + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.messages.ListJobMessagesResponse: + Response to a request to list job + messages. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}/messages", + }, + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/jobs/{job_id}/messages", + }, + ] + request, metadata = self._interceptor.pre_list_job_messages( + request, metadata + ) + pb_request = messages.ListJobMessagesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = messages.ListJobMessagesResponse() + pb_resp = messages.ListJobMessagesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_job_messages(resp) + return resp + + @property + def list_job_messages( + self, + ) -> Callable[[messages.ListJobMessagesRequest], messages.ListJobMessagesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListJobMessages(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("MessagesV1Beta3RestTransport",) diff --git a/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/async_client.py b/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/async_client.py index cd1b4ac..1709efb 100644 --- a/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/async_client.py +++ b/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/async_client.py @@ -225,6 +225,13 @@ async def get_job_metrics( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 async def sample_get_job_metrics(): @@ -314,6 +321,13 @@ async def get_job_execution_details( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 async def sample_get_job_execution_details(): @@ -409,6 +423,13 @@ async def get_stage_execution_details( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 async def sample_get_stage_execution_details(): diff --git a/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py b/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py index 0eda64e..b032f51 100644 --- a/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py +++ b/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py @@ -42,6 +42,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, MetricsV1Beta3Transport from .transports.grpc import MetricsV1Beta3GrpcTransport from .transports.grpc_asyncio import MetricsV1Beta3GrpcAsyncIOTransport +from .transports.rest import MetricsV1Beta3RestTransport class MetricsV1Beta3ClientMeta(type): @@ -57,6 +58,7 @@ class MetricsV1Beta3ClientMeta(type): ) # type: Dict[str, Type[MetricsV1Beta3Transport]] _transport_registry["grpc"] = MetricsV1Beta3GrpcTransport _transport_registry["grpc_asyncio"] = MetricsV1Beta3GrpcAsyncIOTransport + _transport_registry["rest"] = MetricsV1Beta3RestTransport def get_transport_class( cls, @@ -330,6 +332,9 @@ def __init__( transport (Union[str, MetricsV1Beta3Transport]): 
The transport to use. If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the @@ -430,6 +435,13 @@ def get_job_metrics( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 def sample_get_job_metrics(): @@ -520,6 +532,13 @@ def get_job_execution_details( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 def sample_get_job_execution_details(): @@ -618,6 +637,13 @@ def get_stage_execution_details( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 def sample_get_stage_execution_details(): diff --git a/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/__init__.py b/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/__init__.py index fd061be..38b52b0 100644 --- a/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/__init__.py +++ b/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/__init__.py @@ -19,14 +19,18 @@ from .base import MetricsV1Beta3Transport from .grpc import MetricsV1Beta3GrpcTransport from .grpc_asyncio import MetricsV1Beta3GrpcAsyncIOTransport +from .rest import MetricsV1Beta3RestInterceptor, MetricsV1Beta3RestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[MetricsV1Beta3Transport]] _transport_registry["grpc"] = MetricsV1Beta3GrpcTransport _transport_registry["grpc_asyncio"] = MetricsV1Beta3GrpcAsyncIOTransport +_transport_registry["rest"] = MetricsV1Beta3RestTransport __all__ = ( "MetricsV1Beta3Transport", "MetricsV1Beta3GrpcTransport", "MetricsV1Beta3GrpcAsyncIOTransport", + "MetricsV1Beta3RestTransport", + "MetricsV1Beta3RestInterceptor", ) diff --git a/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/rest.py b/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/rest.py new file mode 100644 index 0000000..f889276 --- /dev/null +++ b/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/rest.py @@ -0,0 +1,543 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.dataflow_v1beta3.types import metrics + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import MetricsV1Beta3Transport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class MetricsV1Beta3RestInterceptor: + """Interceptor for MetricsV1Beta3. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the MetricsV1Beta3RestTransport. + + .. code-block:: python + class MyCustomMetricsV1Beta3Interceptor(MetricsV1Beta3RestInterceptor): + def pre_get_job_execution_details(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_job_execution_details(response): + logging.log(f"Received response: {response}") + + def pre_get_job_metrics(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_job_metrics(response): + logging.log(f"Received response: {response}") + + def pre_get_stage_execution_details(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_stage_execution_details(response): + logging.log(f"Received response: {response}") + + transport = MetricsV1Beta3RestTransport(interceptor=MyCustomMetricsV1Beta3Interceptor()) + client = MetricsV1Beta3Client(transport=transport) + + + """ + + def pre_get_job_execution_details( + self, + request: metrics.GetJobExecutionDetailsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[metrics.GetJobExecutionDetailsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_job_execution_details + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetricsV1Beta3 server. + """ + return request, metadata + + def post_get_job_execution_details( + self, response: metrics.JobExecutionDetails + ) -> metrics.JobExecutionDetails: + """Post-rpc interceptor for get_job_execution_details + + Override in a subclass to manipulate the response + after it is returned by the MetricsV1Beta3 server but before + it is returned to user code. 
+ """ + return response + + def pre_get_job_metrics( + self, request: metrics.GetJobMetricsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[metrics.GetJobMetricsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_job_metrics + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetricsV1Beta3 server. + """ + return request, metadata + + def post_get_job_metrics(self, response: metrics.JobMetrics) -> metrics.JobMetrics: + """Post-rpc interceptor for get_job_metrics + + Override in a subclass to manipulate the response + after it is returned by the MetricsV1Beta3 server but before + it is returned to user code. + """ + return response + + def pre_get_stage_execution_details( + self, + request: metrics.GetStageExecutionDetailsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[metrics.GetStageExecutionDetailsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_stage_execution_details + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetricsV1Beta3 server. + """ + return request, metadata + + def post_get_stage_execution_details( + self, response: metrics.StageExecutionDetails + ) -> metrics.StageExecutionDetails: + """Post-rpc interceptor for get_stage_execution_details + + Override in a subclass to manipulate the response + after it is returned by the MetricsV1Beta3 server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class MetricsV1Beta3RestStub: + _session: AuthorizedSession + _host: str + _interceptor: MetricsV1Beta3RestInterceptor + + +class MetricsV1Beta3RestTransport(MetricsV1Beta3Transport): + """REST backend transport for MetricsV1Beta3. + + The Dataflow Metrics API lets you monitor the progress of + Dataflow jobs. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__( + self, + *, + host: str = "dataflow.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[MetricsV1Beta3RestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or MetricsV1Beta3RestInterceptor() + self._prep_wrapped_messages(client_info) + + class _GetJobExecutionDetails(MetricsV1Beta3RestStub): + def __hash__(self): + return hash("GetJobExecutionDetails") + + def __call__( + self, + request: metrics.GetJobExecutionDetailsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metrics.JobExecutionDetails: + r"""Call the get job execution details method over HTTP. 
+ + Args: + request (~.metrics.GetJobExecutionDetailsRequest): + The request object. Request to get job execution details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metrics.JobExecutionDetails: + Information about the execution of a + job. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}/executionDetails", + }, + ] + request, metadata = self._interceptor.pre_get_job_execution_details( + request, metadata + ) + pb_request = metrics.GetJobExecutionDetailsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metrics.JobExecutionDetails() + pb_resp = metrics.JobExecutionDetails.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_job_execution_details(resp) + return resp + + class _GetJobMetrics(MetricsV1Beta3RestStub): + def __hash__(self): + return hash("GetJobMetrics") + + def __call__( + self, + request: metrics.GetJobMetricsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metrics.JobMetrics: + r"""Call the get job metrics method over HTTP. + + Args: + request (~.metrics.GetJobMetricsRequest): + The request object. Request to get job metrics. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metrics.JobMetrics: + JobMetrics contains a collection of + metrics describing the detailed progress + of a Dataflow job. Metrics correspond to + user-defined and system-defined metrics + in the job. + + This resource captures only the most + recent values of each metric; + time-series data can be queried for them + (under the same metric names) from Cloud + Monitoring. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}/metrics", + }, + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/jobs/{job_id}/metrics", + }, + ] + request, metadata = self._interceptor.pre_get_job_metrics(request, metadata) + pb_request = metrics.GetJobMetricsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metrics.JobMetrics() + pb_resp = metrics.JobMetrics.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_job_metrics(resp) + return resp + + class _GetStageExecutionDetails(MetricsV1Beta3RestStub): + def __hash__(self): + return hash("GetStageExecutionDetails") + + def __call__( + self, + request: metrics.GetStageExecutionDetailsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metrics.StageExecutionDetails: + r"""Call the get stage execution + details method over HTTP. + + Args: + request (~.metrics.GetStageExecutionDetailsRequest): + The request object. 
Request to get information about a + particular execution stage of a job. + Currently only tracked for Batch jobs. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metrics.StageExecutionDetails: + Information about the workers and + work items within a stage. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}/stages/{stage_id}/executionDetails", + }, + ] + request, metadata = self._interceptor.pre_get_stage_execution_details( + request, metadata + ) + pb_request = metrics.GetStageExecutionDetailsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metrics.StageExecutionDetails() + pb_resp = metrics.StageExecutionDetails.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_stage_execution_details(resp) + return resp + + @property + def get_job_execution_details( + self, + ) -> Callable[[metrics.GetJobExecutionDetailsRequest], metrics.JobExecutionDetails]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetJobExecutionDetails(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_job_metrics( + self, + ) -> Callable[[metrics.GetJobMetricsRequest], metrics.JobMetrics]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetJobMetrics(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_stage_execution_details( + self, + ) -> Callable[ + [metrics.GetStageExecutionDetailsRequest], metrics.StageExecutionDetails + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetStageExecutionDetails(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("MetricsV1Beta3RestTransport",) diff --git a/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/async_client.py b/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/async_client.py index 7dee335..5af9753 100644 --- a/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/async_client.py +++ b/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/async_client.py @@ -217,6 +217,13 @@ async def get_snapshot( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 async def sample_get_snapshot(): @@ -293,6 +300,13 @@ async def delete_snapshot( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 async def sample_delete_snapshot(): @@ -368,6 +382,13 @@ async def list_snapshots( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 async def sample_list_snapshots(): diff --git a/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py b/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py index 5c7cadd..015ead3 100644 --- a/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py +++ b/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py @@ -42,6 +42,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, SnapshotsV1Beta3Transport from .transports.grpc import SnapshotsV1Beta3GrpcTransport from .transports.grpc_asyncio import SnapshotsV1Beta3GrpcAsyncIOTransport +from .transports.rest import SnapshotsV1Beta3RestTransport class SnapshotsV1Beta3ClientMeta(type): @@ -57,6 +58,7 @@ class SnapshotsV1Beta3ClientMeta(type): ) # type: Dict[str, Type[SnapshotsV1Beta3Transport]] _transport_registry["grpc"] = SnapshotsV1Beta3GrpcTransport _transport_registry["grpc_asyncio"] = SnapshotsV1Beta3GrpcAsyncIOTransport + _transport_registry["rest"] = SnapshotsV1Beta3RestTransport def get_transport_class( cls, @@ -330,6 +332,9 @@ def __init__( transport (Union[str, SnapshotsV1Beta3Transport]): The transport to use. If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the @@ -422,6 +427,13 @@ def get_snapshot( .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 def sample_get_snapshot(): @@ -499,6 +511,13 @@ def delete_snapshot( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 def sample_delete_snapshot(): @@ -575,6 +594,13 @@ def list_snapshots( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 def sample_list_snapshots(): diff --git a/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/__init__.py b/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/__init__.py index 1b1001f..4e2cb30 100644 --- a/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/__init__.py +++ b/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/__init__.py @@ -19,14 +19,18 @@ from .base import SnapshotsV1Beta3Transport from .grpc import SnapshotsV1Beta3GrpcTransport from .grpc_asyncio import SnapshotsV1Beta3GrpcAsyncIOTransport +from .rest import SnapshotsV1Beta3RestInterceptor, SnapshotsV1Beta3RestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[SnapshotsV1Beta3Transport]] _transport_registry["grpc"] = SnapshotsV1Beta3GrpcTransport _transport_registry["grpc_asyncio"] = SnapshotsV1Beta3GrpcAsyncIOTransport +_transport_registry["rest"] = SnapshotsV1Beta3RestTransport __all__ = ( "SnapshotsV1Beta3Transport", "SnapshotsV1Beta3GrpcTransport", "SnapshotsV1Beta3GrpcAsyncIOTransport", + "SnapshotsV1Beta3RestTransport", + "SnapshotsV1Beta3RestInterceptor", ) diff --git a/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/rest.py b/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/rest.py new file mode 100644 index 0000000..e92b4df --- /dev/null +++ b/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/rest.py @@ -0,0 +1,532 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.dataflow_v1beta3.types import snapshots + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import SnapshotsV1Beta3Transport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class SnapshotsV1Beta3RestInterceptor: + """Interceptor for SnapshotsV1Beta3. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SnapshotsV1Beta3RestTransport. + + .. code-block:: python + class MyCustomSnapshotsV1Beta3Interceptor(SnapshotsV1Beta3RestInterceptor): + def pre_delete_snapshot(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_snapshot(response): + logging.log(f"Received response: {response}") + + def pre_get_snapshot(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_snapshot(response): + logging.log(f"Received response: {response}") + + def pre_list_snapshots(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_snapshots(response): + logging.log(f"Received response: {response}") + + transport = SnapshotsV1Beta3RestTransport(interceptor=MyCustomSnapshotsV1Beta3Interceptor()) + client = SnapshotsV1Beta3Client(transport=transport) + + + """ + + def pre_delete_snapshot( + self, + request: snapshots.DeleteSnapshotRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[snapshots.DeleteSnapshotRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_snapshot + + Override in a subclass to manipulate the request or metadata + before they are sent to the SnapshotsV1Beta3 server. + """ + return request, metadata + + def post_delete_snapshot( + self, response: snapshots.DeleteSnapshotResponse + ) -> snapshots.DeleteSnapshotResponse: + """Post-rpc interceptor for delete_snapshot + + Override in a subclass to manipulate the response + after it is returned by the SnapshotsV1Beta3 server but before + it is returned to user code. 
+ """ + return response + + def pre_get_snapshot( + self, request: snapshots.GetSnapshotRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[snapshots.GetSnapshotRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_snapshot + + Override in a subclass to manipulate the request or metadata + before they are sent to the SnapshotsV1Beta3 server. + """ + return request, metadata + + def post_get_snapshot(self, response: snapshots.Snapshot) -> snapshots.Snapshot: + """Post-rpc interceptor for get_snapshot + + Override in a subclass to manipulate the response + after it is returned by the SnapshotsV1Beta3 server but before + it is returned to user code. + """ + return response + + def pre_list_snapshots( + self, + request: snapshots.ListSnapshotsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[snapshots.ListSnapshotsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_snapshots + + Override in a subclass to manipulate the request or metadata + before they are sent to the SnapshotsV1Beta3 server. + """ + return request, metadata + + def post_list_snapshots( + self, response: snapshots.ListSnapshotsResponse + ) -> snapshots.ListSnapshotsResponse: + """Post-rpc interceptor for list_snapshots + + Override in a subclass to manipulate the response + after it is returned by the SnapshotsV1Beta3 server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class SnapshotsV1Beta3RestStub: + _session: AuthorizedSession + _host: str + _interceptor: SnapshotsV1Beta3RestInterceptor + + +class SnapshotsV1Beta3RestTransport(SnapshotsV1Beta3Transport): + """REST backend transport for SnapshotsV1Beta3. + + Provides methods to manage snapshots of Google Cloud Dataflow + jobs. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__( + self, + *, + host: str = "dataflow.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[SnapshotsV1Beta3RestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or SnapshotsV1Beta3RestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _DeleteSnapshot(SnapshotsV1Beta3RestStub):
+        def __hash__(self):
+            return hash("DeleteSnapshot")
+
+        def __call__(
+            self,
+            request: snapshots.DeleteSnapshotRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> snapshots.DeleteSnapshotResponse:
+            r"""Call the delete snapshot method over HTTP.
+
+            Args:
+                request (~.snapshots.DeleteSnapshotRequest):
+                    The request object.
Request to delete a snapshot. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.snapshots.DeleteSnapshotResponse: + Response from deleting a snapshot. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1b3/projects/{project_id}/locations/{location}/snapshots/{snapshot_id}", + }, + { + "method": "delete", + "uri": "/v1b3/projects/{project_id}/snapshots", + }, + ] + request, metadata = self._interceptor.pre_delete_snapshot(request, metadata) + pb_request = snapshots.DeleteSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = snapshots.DeleteSnapshotResponse() + pb_resp = snapshots.DeleteSnapshotResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_snapshot(resp) + return resp + + class _GetSnapshot(SnapshotsV1Beta3RestStub): + def __hash__(self): + return hash("GetSnapshot") + + def __call__( + self, + request: snapshots.GetSnapshotRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> snapshots.Snapshot: + r"""Call the get snapshot method over HTTP. + + Args: + request (~.snapshots.GetSnapshotRequest): + The request object. Request to get information about a + snapshot + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.snapshots.Snapshot: + Represents a snapshot of a job. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/locations/{location}/snapshots/{snapshot_id}", + }, + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/snapshots/{snapshot_id}", + }, + ] + request, metadata = self._interceptor.pre_get_snapshot(request, metadata) + pb_request = snapshots.GetSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = snapshots.Snapshot() + pb_resp = snapshots.Snapshot.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_snapshot(resp) + return resp + + class _ListSnapshots(SnapshotsV1Beta3RestStub): + def __hash__(self): + return hash("ListSnapshots") + + def __call__( + self, + request: snapshots.ListSnapshotsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> snapshots.ListSnapshotsResponse: + r"""Call the list snapshots method over HTTP. + + Args: + request (~.snapshots.ListSnapshotsRequest): + The request object. Request to list snapshots. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.snapshots.ListSnapshotsResponse: + List of snapshots. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}/snapshots", + }, + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/locations/{location}/snapshots", + }, + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/snapshots", + }, + ] + request, metadata = self._interceptor.pre_list_snapshots(request, metadata) + pb_request = snapshots.ListSnapshotsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = snapshots.ListSnapshotsResponse() + pb_resp = snapshots.ListSnapshotsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_snapshots(resp) + return resp + + @property + def delete_snapshot( + self, + ) -> Callable[[snapshots.DeleteSnapshotRequest], snapshots.DeleteSnapshotResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteSnapshot(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_snapshot( + self, + ) -> Callable[[snapshots.GetSnapshotRequest], snapshots.Snapshot]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSnapshot(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_snapshots( + self, + ) -> Callable[[snapshots.ListSnapshotsRequest], snapshots.ListSnapshotsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListSnapshots(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("SnapshotsV1Beta3RestTransport",) diff --git a/google/cloud/dataflow_v1beta3/services/templates_service/async_client.py b/google/cloud/dataflow_v1beta3/services/templates_service/async_client.py index 3b718bf..32af536 100644 --- a/google/cloud/dataflow_v1beta3/services/templates_service/async_client.py +++ b/google/cloud/dataflow_v1beta3/services/templates_service/async_client.py @@ -217,6 +217,13 @@ async def create_job_from_template( .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 async def sample_create_job_from_template(): @@ -295,6 +302,13 @@ async def launch_template( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 async def sample_launch_template(): @@ -372,6 +386,13 @@ async def get_template( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 async def sample_get_template(): diff --git a/google/cloud/dataflow_v1beta3/services/templates_service/client.py b/google/cloud/dataflow_v1beta3/services/templates_service/client.py index a09f9e2..ef7eefc 100644 --- a/google/cloud/dataflow_v1beta3/services/templates_service/client.py +++ b/google/cloud/dataflow_v1beta3/services/templates_service/client.py @@ -42,6 +42,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, TemplatesServiceTransport from .transports.grpc import TemplatesServiceGrpcTransport from .transports.grpc_asyncio import TemplatesServiceGrpcAsyncIOTransport +from .transports.rest import TemplatesServiceRestTransport class TemplatesServiceClientMeta(type): @@ -57,6 +58,7 @@ class TemplatesServiceClientMeta(type): ) # type: Dict[str, Type[TemplatesServiceTransport]] _transport_registry["grpc"] = TemplatesServiceGrpcTransport _transport_registry["grpc_asyncio"] = TemplatesServiceGrpcAsyncIOTransport + _transport_registry["rest"] = TemplatesServiceRestTransport def get_transport_class( cls, @@ -330,6 +332,9 @@ def __init__( transport (Union[str, TemplatesServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the @@ -422,6 +427,13 @@ def create_job_from_template( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 def sample_create_job_from_template(): @@ -501,6 +513,13 @@ def launch_template( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 def sample_launch_template(): @@ -579,6 +598,13 @@ def get_template( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 def sample_get_template(): diff --git a/google/cloud/dataflow_v1beta3/services/templates_service/transports/__init__.py b/google/cloud/dataflow_v1beta3/services/templates_service/transports/__init__.py index 6cebe12..410d70c 100644 --- a/google/cloud/dataflow_v1beta3/services/templates_service/transports/__init__.py +++ b/google/cloud/dataflow_v1beta3/services/templates_service/transports/__init__.py @@ -19,14 +19,18 @@ from .base import TemplatesServiceTransport from .grpc import TemplatesServiceGrpcTransport from .grpc_asyncio import TemplatesServiceGrpcAsyncIOTransport +from .rest import TemplatesServiceRestInterceptor, TemplatesServiceRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[TemplatesServiceTransport]] _transport_registry["grpc"] = TemplatesServiceGrpcTransport _transport_registry["grpc_asyncio"] = TemplatesServiceGrpcAsyncIOTransport +_transport_registry["rest"] = TemplatesServiceRestTransport __all__ = ( "TemplatesServiceTransport", "TemplatesServiceGrpcTransport", "TemplatesServiceGrpcAsyncIOTransport", + "TemplatesServiceRestTransport", + "TemplatesServiceRestInterceptor", ) diff --git a/google/cloud/dataflow_v1beta3/services/templates_service/transports/rest.py b/google/cloud/dataflow_v1beta3/services/templates_service/transports/rest.py new file mode 100644 index 0000000..64b68b6 --- /dev/null +++ b/google/cloud/dataflow_v1beta3/services/templates_service/transports/rest.py @@ -0,0 +1,558 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.dataflow_v1beta3.types import jobs, templates + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import TemplatesServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class TemplatesServiceRestInterceptor: + """Interceptor for TemplatesService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the TemplatesServiceRestTransport. + + .. code-block:: python + class MyCustomTemplatesServiceInterceptor(TemplatesServiceRestInterceptor): + def pre_create_job_from_template(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_job_from_template(response): + logging.log(f"Received response: {response}") + + def pre_get_template(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_template(response): + logging.log(f"Received response: {response}") + + def pre_launch_template(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_launch_template(response): + logging.log(f"Received response: {response}") + + transport = TemplatesServiceRestTransport(interceptor=MyCustomTemplatesServiceInterceptor()) + client = TemplatesServiceClient(transport=transport) + + + """ + + def pre_create_job_from_template( + self, + request: templates.CreateJobFromTemplateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[templates.CreateJobFromTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_job_from_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the TemplatesService server. + """ + return request, metadata + + def post_create_job_from_template(self, response: jobs.Job) -> jobs.Job: + """Post-rpc interceptor for create_job_from_template + + Override in a subclass to manipulate the response + after it is returned by the TemplatesService server but before + it is returned to user code. 
+ """ + return response + + def pre_get_template( + self, request: templates.GetTemplateRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[templates.GetTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the TemplatesService server. + """ + return request, metadata + + def post_get_template( + self, response: templates.GetTemplateResponse + ) -> templates.GetTemplateResponse: + """Post-rpc interceptor for get_template + + Override in a subclass to manipulate the response + after it is returned by the TemplatesService server but before + it is returned to user code. + """ + return response + + def pre_launch_template( + self, + request: templates.LaunchTemplateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[templates.LaunchTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for launch_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the TemplatesService server. + """ + return request, metadata + + def post_launch_template( + self, response: templates.LaunchTemplateResponse + ) -> templates.LaunchTemplateResponse: + """Post-rpc interceptor for launch_template + + Override in a subclass to manipulate the response + after it is returned by the TemplatesService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class TemplatesServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: TemplatesServiceRestInterceptor + + +class TemplatesServiceRestTransport(TemplatesServiceTransport): + """REST backend transport for TemplatesService. + + Provides a method to create Cloud Dataflow jobs from + templates. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__( + self, + *, + host: str = "dataflow.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[TemplatesServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+                client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                    The client info used to send a user-agent string along with
+                    API requests. If ``None``, then default info will be used.
+                    Generally, you only need to set this if you are developing
+                    your own client library.
+                always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                    be used for service account credentials.
+                url_scheme: the protocol scheme for the API endpoint.  Normally
+                        "https", but for testing or local servers,
+                        "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or TemplatesServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateJobFromTemplate(TemplatesServiceRestStub):
+        def __hash__(self):
+            return hash("CreateJobFromTemplate")
+
+        def __call__(
+            self,
+            request: templates.CreateJobFromTemplateRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> jobs.Job:
+            r"""Call the create job from template method over HTTP.
+ + Args: + request (~.templates.CreateJobFromTemplateRequest): + The request object. A request to create a Cloud Dataflow + job from a template. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.jobs.Job: + Defines a job to be run by the Cloud + Dataflow service. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1b3/projects/{project_id}/locations/{location}/templates", + "body": "*", + }, + { + "method": "post", + "uri": "/v1b3/projects/{project_id}/templates", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_create_job_from_template( + request, metadata + ) + pb_request = templates.CreateJobFromTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = jobs.Job() + pb_resp = jobs.Job.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_job_from_template(resp) + return resp + + class _GetTemplate(TemplatesServiceRestStub): + def __hash__(self): + return hash("GetTemplate") + + def __call__( + self, + request: templates.GetTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> templates.GetTemplateResponse: + r"""Call the get template method over HTTP. + + Args: + request (~.templates.GetTemplateRequest): + The request object. A request to retrieve a Cloud + Dataflow job template. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.templates.GetTemplateResponse: + The response to a GetTemplate + request. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/locations/{location}/templates:get", + }, + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/templates:get", + }, + ] + request, metadata = self._interceptor.pre_get_template(request, metadata) + pb_request = templates.GetTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = templates.GetTemplateResponse() + pb_resp = templates.GetTemplateResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_template(resp) + return resp + + class _LaunchTemplate(TemplatesServiceRestStub): + def __hash__(self): + return hash("LaunchTemplate") + + def __call__( + self, + request: templates.LaunchTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> templates.LaunchTemplateResponse: + r"""Call the launch template method over HTTP. + + Args: + request (~.templates.LaunchTemplateRequest): + The request object. A request to launch a template. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.templates.LaunchTemplateResponse: + Response to the request to launch a + template. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1b3/projects/{project_id}/locations/{location}/templates:launch", + "body": "launch_parameters", + }, + { + "method": "post", + "uri": "/v1b3/projects/{project_id}/templates:launch", + "body": "launch_parameters", + }, + ] + request, metadata = self._interceptor.pre_launch_template(request, metadata) + pb_request = templates.LaunchTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = templates.LaunchTemplateResponse() + pb_resp = templates.LaunchTemplateResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_launch_template(resp) + return resp + + @property + def create_job_from_template( + self, + ) -> Callable[[templates.CreateJobFromTemplateRequest], jobs.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateJobFromTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_template( + self, + ) -> Callable[[templates.GetTemplateRequest], templates.GetTemplateResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def launch_template( + self, + ) -> Callable[[templates.LaunchTemplateRequest], templates.LaunchTemplateResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._LaunchTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("TemplatesServiceRestTransport",) diff --git a/mypy.ini b/mypy.ini index 4505b48..574c5ae 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,3 +1,3 @@ [mypy] -python_version = 3.6 +python_version = 3.7 namespace_packages = True diff --git a/noxfile.py b/noxfile.py index cc39f3b..ffe9f35 100644 --- a/noxfile.py +++ b/noxfile.py @@ -189,7 +189,9 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. - session.install("--pre", "grpcio") + # Exclude version 1.49.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/pull/30642 + session.install("--pre", "grpcio!=1.49.0rc1") session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) @@ -369,7 +371,8 @@ def prerelease_deps(session): # dependency of grpc "six", "googleapis-common-protos", - "grpcio", + # Exclude version 1.49.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/pull/30642 + "grpcio!=1.49.0rc1", "grpcio-status", "google-api-core", "proto-plus", diff --git a/renovate.json b/renovate.json index c21036d..39b2a0e 100644 --- a/renovate.json +++ b/renovate.json @@ -5,7 +5,7 @@ ":preserveSemverRanges", ":disableDependencyDashboard" ], - "ignorePaths": [".pre-commit-config.yaml"], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], "pip_requirements": { "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] } diff --git a/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_async.py b/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_async.py index 5357fdd..0a4fc7b 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_async.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_async.py @@ -24,6 +24,13 @@ # [START dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 diff --git a/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync_4148a07f.py similarity index 77% rename from samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync_4148a07f.py index 3f10948..9fb332d 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync_4148a07f.py @@ -23,7 +23,14 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_sync] +# [START dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_sync_4148a07f] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 @@ -41,4 +48,4 @@ def sample_launch_flex_template(): # Handle the response print(response) -# [END dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_sync] +# [END dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_sync_4148a07f] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync_a4f75f91.py b/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync_a4f75f91.py new file mode 100644 index 0000000..2cc8a42 --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync_a4f75f91.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LaunchFlexTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_sync_a4f75f91] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_launch_flex_template(): + # Create a client + client = dataflow_v1beta3.FlexTemplatesServiceClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.LaunchFlexTemplateRequest( + ) + + # Make the request + response = client.launch_flex_template(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_sync_a4f75f91] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_async.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_async.py index 055273c..701ae61 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_async.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_async.py @@ -24,6 +24,13 @@ # [START dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync_0d901b38.py similarity index 78% rename from samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync_0d901b38.py index 5c7e8ee..a45077e 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync_0d901b38.py @@ -23,7 +23,14 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_sync] +# [START dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_sync_0d901b38] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 @@ -42,4 +49,4 @@ def sample_aggregated_list_jobs(): for response in page_result: print(response) -# [END dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_sync] +# [END dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_sync_0d901b38] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync_26f07383.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync_26f07383.py new file mode 100644 index 0000000..778a4f1 --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync_26f07383.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedListJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_sync_26f07383] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_aggregated_list_jobs(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.ListJobsRequest( + ) + + # Make the request + page_result = client.aggregated_list_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_sync_26f07383] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_async.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_async.py index fb98186..eb9e815 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_async.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_async.py @@ -24,6 +24,13 @@ # [START dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync_aab49b35.py similarity index 77% rename from samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync_aab49b35.py index cc112d4..d6c5c9a 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync_aab49b35.py @@ -23,7 +23,14 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_sync] +# [START dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_sync_aab49b35] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 @@ -41,4 +48,4 @@ def sample_check_active_jobs(): # Handle the response print(response) -# [END dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_sync] +# [END dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_sync_aab49b35] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync_baed931b.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync_baed931b.py new file mode 100644 index 0000000..dc9691c --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync_baed931b.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CheckActiveJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_sync_baed931b] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_check_active_jobs(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.CheckActiveJobsRequest( + ) + + # Make the request + response = client.check_active_jobs(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_sync_baed931b] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_async.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_async.py index 7ea8825..aa6081a 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_async.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_async.py @@ -24,6 +24,13 @@ # [START dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync_2a1b0208.py similarity index 70% rename from samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync_2a1b0208.py index 8336586..406ead7 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync_2a1b0208.py @@ -23,7 +23,14 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_sync] +# [START dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_sync_2a1b0208] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 @@ -41,4 +48,4 @@ def sample_create_job(): # Handle the response print(response) -# [END dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_sync] +# [END dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_sync_2a1b0208] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync_ad13d605.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync_ad13d605.py new file mode 100644 index 0000000..93aadf8 --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync_ad13d605.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_sync_ad13d605] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_create_job(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.CreateJobRequest( + ) + + # Make the request + response = client.create_job(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_sync_ad13d605] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_async.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_async.py index bb903dd..addc3ab 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_async.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_async.py @@ -24,6 +24,13 @@ # [START dataflow_v1beta3_generated_JobsV1Beta3_GetJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync_3e418026.py similarity index 71% rename from samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync_3e418026.py index 08c216a..9980879 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync_3e418026.py @@ -23,7 +23,14 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_JobsV1Beta3_GetJob_sync] +# [START dataflow_v1beta3_generated_JobsV1Beta3_GetJob_sync_3e418026] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 @@ -41,4 +48,4 @@ def sample_get_job(): # Handle the response print(response) -# [END dataflow_v1beta3_generated_JobsV1Beta3_GetJob_sync] +# [END dataflow_v1beta3_generated_JobsV1Beta3_GetJob_sync_3e418026] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync_45212860.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync_45212860.py new file mode 100644 index 0000000..364e642 --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync_45212860.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_JobsV1Beta3_GetJob_sync_45212860] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_get_job(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetJobRequest( + ) + + # Make the request + response = client.get_job(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_JobsV1Beta3_GetJob_sync_45212860] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_async.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_async.py index b3d657a..c4084d0 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_async.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_async.py @@ -24,6 +24,13 @@ # [START dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync_97a60855.py similarity index 71% rename from samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync_97a60855.py index a15e4b5..8ebfabf 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync_97a60855.py @@ -23,7 +23,14 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_sync] +# [START dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_sync_97a60855] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 @@ -42,4 +49,4 @@ def sample_list_jobs(): for response in page_result: print(response) -# [END dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_sync] +# [END dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_sync_97a60855] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync_bfb75b4c.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync_bfb75b4c.py new file mode 100644 index 0000000..633ac93 --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync_bfb75b4c.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_sync_bfb75b4c] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_list_jobs(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.ListJobsRequest( + ) + + # Make the request + page_result = client.list_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_sync_bfb75b4c] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_async.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_async.py index db6128d..7be3e3f 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_async.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_async.py @@ -24,6 +24,13 @@ # [START dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync_60f4d8fe.py similarity index 74% rename from samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync_60f4d8fe.py index 1cc8d21..49a3645 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync_60f4d8fe.py @@ -23,7 +23,14 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_sync] +# [START dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_sync_60f4d8fe] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 @@ -41,4 +48,4 @@ def sample_snapshot_job(): # Handle the response print(response) -# [END dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_sync] +# [END dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_sync_60f4d8fe] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync_ff3cb8bd.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync_ff3cb8bd.py new file mode 100644 index 0000000..a767e86 --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync_ff3cb8bd.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SnapshotJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_sync_ff3cb8bd] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_snapshot_job(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.SnapshotJobRequest( + ) + + # Make the request + response = client.snapshot_job(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_sync_ff3cb8bd] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_async.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_async.py index 872b840..a5b58a1 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_async.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_async.py @@ -24,6 +24,13 @@ # [START dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync_8db1fd08.py similarity index 70% rename from samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync_8db1fd08.py index 1808f65..0eba295 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync_8db1fd08.py @@ -23,7 +23,14 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_sync] +# [START dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_sync_8db1fd08] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 @@ -41,4 +48,4 @@ def sample_update_job(): # Handle the response print(response) -# [END dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_sync] +# [END dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_sync_8db1fd08] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync_e2fa191d.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync_e2fa191d.py new file mode 100644 index 0000000..c3dac71 --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync_e2fa191d.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_sync_e2fa191d] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_update_job(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.UpdateJobRequest( + ) + + # Make the request + response = client.update_job(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_sync_e2fa191d] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_async.py b/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_async.py index 5dfa45c..8f449c0 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_async.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_async.py @@ -24,6 +24,13 @@ # [START dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 diff --git a/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync_04dec136.py similarity index 77% rename from samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync_04dec136.py index 18e6b11..82358ed 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync_04dec136.py @@ -23,7 +23,14 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_sync] +# [START dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_sync_04dec136] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 @@ -42,4 +49,4 @@ def sample_list_job_messages(): for response in page_result: print(response) -# [END dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_sync] +# [END dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_sync_04dec136] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync_f19ed68d.py b/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync_f19ed68d.py new file mode 100644 index 0000000..1aa793d --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync_f19ed68d.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListJobMessages +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_sync_f19ed68d] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_list_job_messages(): + # Create a client + client = dataflow_v1beta3.MessagesV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.ListJobMessagesRequest( + ) + + # Make the request + page_result = client.list_job_messages(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_sync_f19ed68d] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_async.py b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_async.py index 604072a..97150ab 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_async.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_async.py @@ -24,6 +24,13 @@ # [START dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 diff --git a/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync_a4ff4d57.py similarity index 77% rename from samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync_a4ff4d57.py index a3f3667..a1e521e 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync_a4ff4d57.py @@ -23,7 +23,14 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_sync] +# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_sync_a4ff4d57] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 @@ -42,4 +49,4 @@ def sample_get_job_execution_details(): for response in page_result: print(response) -# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_sync] +# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_sync_a4ff4d57] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync_b7550163.py b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync_b7550163.py new file mode 100644 index 0000000..211b983 --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync_b7550163.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJobExecutionDetails +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_sync_b7550163] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_get_job_execution_details(): + # Create a client + client = dataflow_v1beta3.MetricsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetJobExecutionDetailsRequest( + ) + + # Make the request + page_result = client.get_job_execution_details(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_sync_b7550163] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_async.py b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_async.py index 19eae53..c285799 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_async.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_async.py @@ -24,6 +24,13 @@ # [START dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 diff --git a/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync_02835968.py similarity index 77% rename from samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync_02835968.py index b3681c4..54a22ec 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync_02835968.py @@ -23,7 +23,14 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_sync] +# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_sync_02835968] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 @@ -41,4 +48,4 @@ def sample_get_job_metrics(): # Handle the response print(response) -# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_sync] +# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_sync_02835968] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync_59dce217.py b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync_59dce217.py new file mode 100644 index 0000000..ffc68c4 --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync_59dce217.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJobMetrics +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_sync_59dce217] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_get_job_metrics(): + # Create a client + client = dataflow_v1beta3.MetricsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetJobMetricsRequest( + ) + + # Make the request + response = client.get_job_metrics(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_sync_59dce217] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_async.py b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_async.py index b1b620c..431fc92 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_async.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_async.py @@ -24,6 +24,13 @@ # [START dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 diff --git a/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync_4acb3ded.py similarity index 77% rename from samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync_4acb3ded.py index 9e4ae13..76af1fd 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync_4acb3ded.py @@ -23,7 +23,14 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_sync] +# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_sync_4acb3ded] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 @@ -42,4 +49,4 @@ def sample_get_stage_execution_details(): for response in page_result: print(response) -# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_sync] +# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_sync_4acb3ded] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync_9490a11d.py b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync_9490a11d.py new file mode 100644 index 0000000..f88599e --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync_9490a11d.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetStageExecutionDetails +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_sync_9490a11d] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_get_stage_execution_details(): + # Create a client + client = dataflow_v1beta3.MetricsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetStageExecutionDetailsRequest( + ) + + # Make the request + page_result = client.get_stage_execution_details(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_sync_9490a11d] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_async.py b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_async.py index f210c6a..a3d83e7 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_async.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_async.py @@ -24,6 +24,13 @@ # [START dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 diff --git a/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync_34b1dfd7.py similarity index 77% rename from samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync_34b1dfd7.py index 2a69716..cc7f9e5 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync_34b1dfd7.py @@ -23,7 +23,14 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_sync] +# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_sync_34b1dfd7] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 @@ -41,4 +48,4 @@ def sample_delete_snapshot(): # Handle the response print(response) -# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_sync] +# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_sync_34b1dfd7] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync_567e01e6.py b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync_567e01e6.py new file mode 100644 index 0000000..938c6e9 --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync_567e01e6.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_sync_567e01e6] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_delete_snapshot(): + # Create a client + client = dataflow_v1beta3.SnapshotsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.DeleteSnapshotRequest( + ) + + # Make the request + response = client.delete_snapshot(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_sync_567e01e6] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_async.py b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_async.py index f153028..b95e491 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_async.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_async.py @@ -24,6 +24,13 @@ # [START dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 diff --git a/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync_846de01f.py similarity index 77% rename from samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync_846de01f.py index c9ac9f9..9eb621a 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync_846de01f.py @@ -23,7 +23,14 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_sync] +# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_sync_846de01f] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 @@ -41,4 +48,4 @@ def sample_get_snapshot(): # Handle the response print(response) -# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_sync] +# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_sync_846de01f] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync_ba1ca2b6.py b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync_ba1ca2b6.py new file mode 100644 index 0000000..f4ef41f --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync_ba1ca2b6.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_sync_ba1ca2b6] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_get_snapshot(): + # Create a client + client = dataflow_v1beta3.SnapshotsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetSnapshotRequest( + ) + + # Make the request + response = client.get_snapshot(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_sync_ba1ca2b6] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_async.py b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_async.py index db185c6..e8303a0 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_async.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_async.py @@ -24,6 +24,13 @@ # [START dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 diff --git a/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync_425e5024.py similarity index 77% rename from samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync_425e5024.py index 0bc9dd2..d6955d2 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync_425e5024.py @@ -23,7 +23,14 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_sync] +# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_sync_425e5024] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 @@ -41,4 +48,4 @@ def sample_list_snapshots(): # Handle the response print(response) -# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_sync] +# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_sync_425e5024] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync_6da08cb9.py b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync_6da08cb9.py new file mode 100644 index 0000000..a8fdc15 --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync_6da08cb9.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSnapshots +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_sync_6da08cb9] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_list_snapshots(): + # Create a client + client = dataflow_v1beta3.SnapshotsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.ListSnapshotsRequest( + ) + + # Make the request + response = client.list_snapshots(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_sync_6da08cb9] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_async.py b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_async.py index ca4b2c5..e21cec8 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_async.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_async.py @@ -24,6 +24,13 @@ # [START dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 diff --git a/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_sync_9e26a4bf.py similarity index 77% rename from samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_sync_9e26a4bf.py index 3e117b3..3c0a699 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_sync_9e26a4bf.py @@ -23,7 +23,14 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_sync] +# [START dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_sync_9e26a4bf] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 @@ -42,4 +49,4 @@ def sample_create_job_from_template(): # Handle the response print(response) -# [END dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_sync] +# [END dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_sync_9e26a4bf] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_sync_e2cb482f.py b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_sync_e2cb482f.py new file mode 100644 index 0000000..2470a87 --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_sync_e2cb482f.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateJobFromTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_sync_e2cb482f] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_create_job_from_template(): + # Create a client + client = dataflow_v1beta3.TemplatesServiceClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.CreateJobFromTemplateRequest( + gcs_path="gcs_path_value", + ) + + # Make the request + response = client.create_job_from_template(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_sync_e2cb482f] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_async.py b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_async.py index 4c17136..8760665 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_async.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_async.py @@ -24,6 +24,13 @@ # [START dataflow_v1beta3_generated_TemplatesService_GetTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 diff --git a/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_sync_07cd261a.py similarity index 77% rename from samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_sync_07cd261a.py index dfdfece..9691908 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_sync_07cd261a.py @@ -23,7 +23,14 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_TemplatesService_GetTemplate_sync] +# [START dataflow_v1beta3_generated_TemplatesService_GetTemplate_sync_07cd261a] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 @@ -42,4 +49,4 @@ def sample_get_template(): # Handle the response print(response) -# [END dataflow_v1beta3_generated_TemplatesService_GetTemplate_sync] +# [END dataflow_v1beta3_generated_TemplatesService_GetTemplate_sync_07cd261a] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_sync_732e6209.py b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_sync_732e6209.py new file mode 100644 index 0000000..88cf605 --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_sync_732e6209.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_TemplatesService_GetTemplate_sync_732e6209] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_get_template(): + # Create a client + client = dataflow_v1beta3.TemplatesServiceClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetTemplateRequest( + gcs_path="gcs_path_value", + ) + + # Make the request + response = client.get_template(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_TemplatesService_GetTemplate_sync_732e6209] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_async.py b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_async.py index a7a70a1..5d00450 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_async.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_async.py @@ -24,6 +24,13 @@ # [START dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 diff --git a/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_sync_140179ca.py similarity index 77% rename from samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_sync_140179ca.py index 9dc8d08..98b969f 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_sync_140179ca.py @@ -23,7 +23,14 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_sync] +# [START dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_sync_140179ca] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import dataflow_v1beta3 @@ -42,4 +49,4 @@ def sample_launch_template(): # Handle the response print(response) -# [END dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_sync] +# [END dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_sync_140179ca] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_sync_77764eb9.py b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_sync_77764eb9.py new file mode 100644 index 0000000..74d62b9 --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_sync_77764eb9.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LaunchTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_sync_77764eb9] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_launch_template(): + # Create a client + client = dataflow_v1beta3.TemplatesServiceClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.LaunchTemplateRequest( + gcs_path="gcs_path_value", + ) + + # Make the request + response = client.launch_template(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_sync_77764eb9] diff --git a/samples/generated_samples/snippet_metadata_dataflow_v1beta3.json b/samples/generated_samples/snippet_metadata_dataflow_v1beta3.json index 72ed241..ac911cf 100644 --- a/samples/generated_samples/snippet_metadata_dataflow_v1beta3.json +++ b/samples/generated_samples/snippet_metadata_dataflow_v1beta3.json @@ -55,33 +55,33 @@ "regionTag": "dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_async", "segments": [ { - "end": 43, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 44, - "start": 41, + "end": 51, + "start": 48, "type": 
"RESPONSE_HANDLING" } ], @@ -125,43 +125,119 @@ "shortName": "launch_flex_template" }, "description": "Sample for LaunchFlexTemplate", - "file": "dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync.py", + "file": "dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync_4148a07f.py", "language": "PYTHON", "origin": "API_DEFINITION", "regionTag": "dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_sync", "segments": [ { - "end": 43, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync_4148a07f.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.FlexTemplatesServiceClient", + "shortName": "FlexTemplatesServiceClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.FlexTemplatesServiceClient.launch_flex_template", + "method": { + "fullName": "google.dataflow.v1beta3.FlexTemplatesService.LaunchFlexTemplate", + "service": { + "fullName": "google.dataflow.v1beta3.FlexTemplatesService", + "shortName": "FlexTemplatesService" + }, + "shortName": "LaunchFlexTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateResponse", + "shortName": "launch_flex_template" + }, + 
"description": "Sample for LaunchFlexTemplate", + "file": "dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync_a4f75f91.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, { "end": 40, "start": 38, - "type": "REQUEST_EXECUTION" + "type": "CLIENT_INITIALIZATION" }, { "end": 44, "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync.py" + "title": "dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync_a4f75f91.py" }, { "canonical": true, @@ -208,33 +284,33 @@ "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 41, + "end": 52, + "start": 48, "type": "RESPONSE_HANDLING" } ], @@ -278,65 +354,64 @@ "shortName": "aggregated_list_jobs" }, "description": "Sample for AggregatedListJobs", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync.py", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync_0d901b38.py", "language": "PYTHON", "origin": "API_DEFINITION", "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, 
"type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 41, + "end": 52, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync_0d901b38.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", - "shortName": "JobsV1Beta3AsyncClient" + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", + "shortName": "JobsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.check_active_jobs", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.aggregated_list_jobs", "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.CheckActiveJobs", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.AggregatedListJobs", "service": { "fullName": "google.dataflow.v1beta3.JobsV1Beta3", "shortName": "JobsV1Beta3" }, - "shortName": "CheckActiveJobs" + "shortName": "AggregatedListJobs" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.CheckActiveJobsRequest" + "type": "google.cloud.dataflow_v1beta3.types.ListJobsRequest" }, { "name": "retry", @@ -351,56 +426,57 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.CheckActiveJobsResponse", - "shortName": "check_active_jobs" + "resultType": "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.AggregatedListJobsPager", + "shortName": "aggregated_list_jobs" }, - "description": "Sample for CheckActiveJobs", - "file": 
"dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_async.py", + "description": "Sample for AggregatedListJobs", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync_26f07383.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_async", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_sync", "segments": [ { - "end": 43, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 44, - "start": 41, + "end": 52, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_async.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync_26f07383.py" }, { "canonical": true, "clientMethod": { + "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", - "shortName": "JobsV1Beta3Client" + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", + "shortName": "JobsV1Beta3AsyncClient" }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.check_active_jobs", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.check_active_jobs", "method": { "fullName": "google.dataflow.v1beta3.JobsV1Beta3.CheckActiveJobs", "service": { @@ -431,65 +507,64 @@ "shortName": "check_active_jobs" }, "description": "Sample for CheckActiveJobs", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync.py", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_sync", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_async", "segments": [ { - "end": 43, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 44, - "start": 41, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_async.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", - "shortName": "JobsV1Beta3AsyncClient" + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", + "shortName": "JobsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.create_job", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.check_active_jobs", "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.CreateJob", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.CheckActiveJobs", "service": { "fullName": "google.dataflow.v1beta3.JobsV1Beta3", "shortName": "JobsV1Beta3" }, - "shortName": "CreateJob" + "shortName": "CheckActiveJobs" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.CreateJobRequest" + "type": "google.cloud.dataflow_v1beta3.types.CheckActiveJobsRequest" }, { "name": "retry", @@ -504,47 +579,47 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.Job", - "shortName": "create_job" + "resultType": "google.cloud.dataflow_v1beta3.types.CheckActiveJobsResponse", + "shortName": 
"check_active_jobs" }, - "description": "Sample for CreateJob", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_create_job_async.py", + "description": "Sample for CheckActiveJobs", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync_aab49b35.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_async", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_sync", "segments": [ { - "end": 43, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 44, - "start": 41, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_create_job_async.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync_aab49b35.py" }, { "canonical": true, @@ -553,19 +628,19 @@ "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", "shortName": "JobsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.create_job", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.check_active_jobs", "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.CreateJob", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.CheckActiveJobs", "service": { "fullName": "google.dataflow.v1beta3.JobsV1Beta3", "shortName": "JobsV1Beta3" }, - "shortName": "CreateJob" + "shortName": "CheckActiveJobs" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.CreateJobRequest" + "type": "google.cloud.dataflow_v1beta3.types.CheckActiveJobsRequest" }, { "name": "retry", @@ -580,47 +655,47 @@ "type": "Sequence[Tuple[str, str]" } ], 
- "resultType": "google.cloud.dataflow_v1beta3.types.Job", - "shortName": "create_job" + "resultType": "google.cloud.dataflow_v1beta3.types.CheckActiveJobsResponse", + "shortName": "check_active_jobs" }, - "description": "Sample for CreateJob", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync.py", + "description": "Sample for CheckActiveJobs", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync_baed931b.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_sync", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_sync", "segments": [ { - "end": 43, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 44, - "start": 41, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync_baed931b.py" }, { "canonical": true, @@ -630,19 +705,19 @@ "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", "shortName": "JobsV1Beta3AsyncClient" }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.get_job", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.create_job", "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.GetJob", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.CreateJob", "service": { "fullName": "google.dataflow.v1beta3.JobsV1Beta3", "shortName": "JobsV1Beta3" }, - "shortName": "GetJob" + "shortName": "CreateJob" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.dataflow_v1beta3.types.GetJobRequest" + "type": "google.cloud.dataflow_v1beta3.types.CreateJobRequest" }, { "name": "retry", @@ -658,46 +733,46 @@ } ], "resultType": "google.cloud.dataflow_v1beta3.types.Job", - "shortName": "get_job" + "shortName": "create_job" }, - "description": "Sample for GetJob", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_get_job_async.py", + "description": "Sample for CreateJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_create_job_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_GetJob_async", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_async", "segments": [ { - "end": 43, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 44, - "start": 41, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_get_job_async.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_create_job_async.py" }, { "canonical": true, @@ -706,19 +781,19 @@ "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", "shortName": "JobsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.get_job", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.create_job", "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.GetJob", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.CreateJob", "service": { "fullName": "google.dataflow.v1beta3.JobsV1Beta3", "shortName": "JobsV1Beta3" }, - "shortName": "GetJob" + "shortName": "CreateJob" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.dataflow_v1beta3.types.GetJobRequest" + "type": "google.cloud.dataflow_v1beta3.types.CreateJobRequest" }, { "name": "retry", @@ -734,68 +809,67 @@ } ], "resultType": "google.cloud.dataflow_v1beta3.types.Job", - "shortName": "get_job" + "shortName": "create_job" }, - "description": "Sample for GetJob", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync.py", + "description": "Sample for CreateJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync_ad13d605.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_GetJob_sync", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_sync", "segments": [ { - "end": 43, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 44, - "start": 41, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync_ad13d605.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", - "shortName": "JobsV1Beta3AsyncClient" + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", + "shortName": "JobsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.list_jobs", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.create_job", "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.ListJobs", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.CreateJob", "service": { "fullName": "google.dataflow.v1beta3.JobsV1Beta3", 
"shortName": "JobsV1Beta3" }, - "shortName": "ListJobs" + "shortName": "CreateJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.ListJobsRequest" + "type": "google.cloud.dataflow_v1beta3.types.CreateJobRequest" }, { "name": "retry", @@ -810,68 +884,69 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.ListJobsAsyncPager", - "shortName": "list_jobs" + "resultType": "google.cloud.dataflow_v1beta3.types.Job", + "shortName": "create_job" }, - "description": "Sample for ListJobs", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_async.py", + "description": "Sample for CreateJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync_2a1b0208.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_async", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_sync", "segments": [ { - "end": 44, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 41, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_async.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync_2a1b0208.py" }, { "canonical": true, "clientMethod": { + "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", - "shortName": "JobsV1Beta3Client" + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", + "shortName": "JobsV1Beta3AsyncClient" }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.list_jobs", + 
"fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.get_job", "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.ListJobs", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.GetJob", "service": { "fullName": "google.dataflow.v1beta3.JobsV1Beta3", "shortName": "JobsV1Beta3" }, - "shortName": "ListJobs" + "shortName": "GetJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.ListJobsRequest" + "type": "google.cloud.dataflow_v1beta3.types.GetJobRequest" }, { "name": "retry", @@ -886,69 +961,68 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.ListJobsPager", - "shortName": "list_jobs" + "resultType": "google.cloud.dataflow_v1beta3.types.Job", + "shortName": "get_job" }, - "description": "Sample for ListJobs", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync.py", + "description": "Sample for GetJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_get_job_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_sync", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_GetJob_async", "segments": [ { - "end": 44, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 41, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_get_job_async.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": 
"google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", - "shortName": "JobsV1Beta3AsyncClient" + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", + "shortName": "JobsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.snapshot_job", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.get_job", "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.SnapshotJob", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.GetJob", "service": { "fullName": "google.dataflow.v1beta3.JobsV1Beta3", "shortName": "JobsV1Beta3" }, - "shortName": "SnapshotJob" + "shortName": "GetJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.SnapshotJobRequest" + "type": "google.cloud.dataflow_v1beta3.types.GetJobRequest" }, { "name": "retry", @@ -963,47 +1037,47 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.Snapshot", - "shortName": "snapshot_job" + "resultType": "google.cloud.dataflow_v1beta3.types.Job", + "shortName": "get_job" }, - "description": "Sample for SnapshotJob", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_async.py", + "description": "Sample for GetJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync_3e418026.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_async", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_GetJob_sync", "segments": [ { - "end": 43, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 44, - "start": 41, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" 
} ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_async.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync_3e418026.py" }, { "canonical": true, @@ -1012,19 +1086,19 @@ "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", "shortName": "JobsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.snapshot_job", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.get_job", "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.SnapshotJob", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.GetJob", "service": { "fullName": "google.dataflow.v1beta3.JobsV1Beta3", "shortName": "JobsV1Beta3" }, - "shortName": "SnapshotJob" + "shortName": "GetJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.SnapshotJobRequest" + "type": "google.cloud.dataflow_v1beta3.types.GetJobRequest" }, { "name": "retry", @@ -1039,47 +1113,47 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.Snapshot", - "shortName": "snapshot_job" + "resultType": "google.cloud.dataflow_v1beta3.types.Job", + "shortName": "get_job" }, - "description": "Sample for SnapshotJob", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync.py", + "description": "Sample for GetJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync_45212860.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_sync", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_GetJob_sync", "segments": [ { - "end": 43, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": 
"REQUEST_EXECUTION" }, { - "end": 44, - "start": 41, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync_45212860.py" }, { "canonical": true, @@ -1089,19 +1163,19 @@ "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", "shortName": "JobsV1Beta3AsyncClient" }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.update_job", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.list_jobs", "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.UpdateJob", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.ListJobs", "service": { "fullName": "google.dataflow.v1beta3.JobsV1Beta3", "shortName": "JobsV1Beta3" }, - "shortName": "UpdateJob" + "shortName": "ListJobs" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.UpdateJobRequest" + "type": "google.cloud.dataflow_v1beta3.types.ListJobsRequest" }, { "name": "retry", @@ -1116,47 +1190,47 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.Job", - "shortName": "update_job" + "resultType": "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.ListJobsAsyncPager", + "shortName": "list_jobs" }, - "description": "Sample for UpdateJob", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_update_job_async.py", + "description": "Sample for ListJobs", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_async", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_async", "segments": [ { - "end": 43, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 
37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 44, - "start": 41, + "end": 52, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_update_job_async.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_async.py" }, { "canonical": true, @@ -1165,19 +1239,19 @@ "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", "shortName": "JobsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.update_job", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.list_jobs", "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.UpdateJob", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.ListJobs", "service": { "fullName": "google.dataflow.v1beta3.JobsV1Beta3", "shortName": "JobsV1Beta3" }, - "shortName": "UpdateJob" + "shortName": "ListJobs" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.UpdateJobRequest" + "type": "google.cloud.dataflow_v1beta3.types.ListJobsRequest" }, { "name": "retry", @@ -1192,69 +1266,68 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.Job", - "shortName": "update_job" + "resultType": "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.ListJobsPager", + "shortName": "list_jobs" }, - "description": "Sample for UpdateJob", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync.py", + "description": "Sample for ListJobs", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync_97a60855.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_sync", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_sync", "segments": [ { - "end": 43, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 51, "start": 27, 
"type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 44, - "start": 41, + "end": 52, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync_97a60855.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.MessagesV1Beta3AsyncClient", - "shortName": "MessagesV1Beta3AsyncClient" + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", + "shortName": "JobsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.MessagesV1Beta3AsyncClient.list_job_messages", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.list_jobs", "method": { - "fullName": "google.dataflow.v1beta3.MessagesV1Beta3.ListJobMessages", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.ListJobs", "service": { - "fullName": "google.dataflow.v1beta3.MessagesV1Beta3", - "shortName": "MessagesV1Beta3" + "fullName": "google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" }, - "shortName": "ListJobMessages" + "shortName": "ListJobs" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.ListJobMessagesRequest" + "type": "google.cloud.dataflow_v1beta3.types.ListJobsRequest" }, { "name": "retry", @@ -1269,68 +1342,69 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.services.messages_v1_beta3.pagers.ListJobMessagesAsyncPager", - "shortName": "list_job_messages" + "resultType": "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.ListJobsPager", + "shortName": "list_jobs" }, - "description": "Sample for ListJobMessages", - "file": 
"dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_async.py", + "description": "Sample for ListJobs", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync_bfb75b4c.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_async", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 41, + "end": 52, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_async.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync_bfb75b4c.py" }, { "canonical": true, "clientMethod": { + "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.MessagesV1Beta3Client", - "shortName": "MessagesV1Beta3Client" + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", + "shortName": "JobsV1Beta3AsyncClient" }, - "fullName": "google.cloud.dataflow_v1beta3.MessagesV1Beta3Client.list_job_messages", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.snapshot_job", "method": { - "fullName": "google.dataflow.v1beta3.MessagesV1Beta3.ListJobMessages", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.SnapshotJob", "service": { - "fullName": "google.dataflow.v1beta3.MessagesV1Beta3", - "shortName": "MessagesV1Beta3" + "fullName": "google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" }, - "shortName": "ListJobMessages" + "shortName": "SnapshotJob" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.dataflow_v1beta3.types.ListJobMessagesRequest" + "type": "google.cloud.dataflow_v1beta3.types.SnapshotJobRequest" }, { "name": "retry", @@ -1345,69 +1419,1366 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.services.messages_v1_beta3.pagers.ListJobMessagesPager", - "shortName": "list_job_messages" + "resultType": "google.cloud.dataflow_v1beta3.types.Snapshot", + "shortName": "snapshot_job" }, - "description": "Sample for ListJobMessages", - "file": "dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync.py", + "description": "Sample for SnapshotJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_sync", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_async", "segments": [ { - "end": 44, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 41, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_async.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient", - "shortName": "MetricsV1Beta3AsyncClient" + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", + "shortName": "JobsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient.get_job_execution_details", + "fullName": 
"google.cloud.dataflow_v1beta3.JobsV1Beta3Client.snapshot_job", "method": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetJobExecutionDetails", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.SnapshotJob", "service": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", - "shortName": "MetricsV1Beta3" + "fullName": "google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" }, - "shortName": "GetJobExecutionDetails" + "shortName": "SnapshotJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.SnapshotJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.Snapshot", + "shortName": "snapshot_job" + }, + "description": "Sample for SnapshotJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync_60f4d8fe.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync_60f4d8fe.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", + "shortName": "JobsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.snapshot_job", + "method": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.SnapshotJob", + "service": { + "fullName": 
"google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" + }, + "shortName": "SnapshotJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.SnapshotJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.Snapshot", + "shortName": "snapshot_job" + }, + "description": "Sample for SnapshotJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync_ff3cb8bd.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync_ff3cb8bd.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", + "shortName": "JobsV1Beta3AsyncClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.update_job", + "method": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.UpdateJob", + "service": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" + }, + "shortName": "UpdateJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.UpdateJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + 
"type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.Job", + "shortName": "update_job" + }, + "description": "Sample for UpdateJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_update_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_update_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", + "shortName": "JobsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.update_job", + "method": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.UpdateJob", + "service": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" + }, + "shortName": "UpdateJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.UpdateJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.Job", + "shortName": "update_job" + }, + "description": "Sample for UpdateJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync_8db1fd08.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_sync", + "segments": [ + { + "end": 50, + "start": 
27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync_8db1fd08.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", + "shortName": "JobsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.update_job", + "method": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.UpdateJob", + "service": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" + }, + "shortName": "UpdateJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.UpdateJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.Job", + "shortName": "update_job" + }, + "description": "Sample for UpdateJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync_e2fa191d.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync_e2fa191d.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MessagesV1Beta3AsyncClient", + "shortName": "MessagesV1Beta3AsyncClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.MessagesV1Beta3AsyncClient.list_job_messages", + "method": { + "fullName": "google.dataflow.v1beta3.MessagesV1Beta3.ListJobMessages", + "service": { + "fullName": "google.dataflow.v1beta3.MessagesV1Beta3", + "shortName": "MessagesV1Beta3" + }, + "shortName": "ListJobMessages" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.ListJobMessagesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.services.messages_v1_beta3.pagers.ListJobMessagesAsyncPager", + "shortName": "list_job_messages" + }, + "description": "Sample for ListJobMessages", + "file": "dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MessagesV1Beta3Client", + "shortName": "MessagesV1Beta3Client" + }, + 
"fullName": "google.cloud.dataflow_v1beta3.MessagesV1Beta3Client.list_job_messages", + "method": { + "fullName": "google.dataflow.v1beta3.MessagesV1Beta3.ListJobMessages", + "service": { + "fullName": "google.dataflow.v1beta3.MessagesV1Beta3", + "shortName": "MessagesV1Beta3" + }, + "shortName": "ListJobMessages" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.ListJobMessagesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.services.messages_v1_beta3.pagers.ListJobMessagesPager", + "shortName": "list_job_messages" + }, + "description": "Sample for ListJobMessages", + "file": "dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync_04dec136.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync_04dec136.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MessagesV1Beta3Client", + "shortName": "MessagesV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.MessagesV1Beta3Client.list_job_messages", + "method": { + "fullName": "google.dataflow.v1beta3.MessagesV1Beta3.ListJobMessages", + "service": { + "fullName": "google.dataflow.v1beta3.MessagesV1Beta3", + "shortName": 
"MessagesV1Beta3" + }, + "shortName": "ListJobMessages" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.ListJobMessagesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.services.messages_v1_beta3.pagers.ListJobMessagesPager", + "shortName": "list_job_messages" + }, + "description": "Sample for ListJobMessages", + "file": "dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync_f19ed68d.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync_f19ed68d.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient", + "shortName": "MetricsV1Beta3AsyncClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient.get_job_execution_details", + "method": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetJobExecutionDetails", + "service": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", + "shortName": "MetricsV1Beta3" + }, + "shortName": "GetJobExecutionDetails" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetJobExecutionDetailsRequest" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetJobExecutionDetailsAsyncPager", + "shortName": "get_job_execution_details" + }, + "description": "Sample for GetJobExecutionDetails", + "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client", + "shortName": "MetricsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client.get_job_execution_details", + "method": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetJobExecutionDetails", + "service": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", + "shortName": "MetricsV1Beta3" + }, + "shortName": "GetJobExecutionDetails" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetJobExecutionDetailsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetJobExecutionDetailsPager", + "shortName": "get_job_execution_details" + }, + "description": "Sample for GetJobExecutionDetails", + "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync_a4ff4d57.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync_a4ff4d57.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client", + "shortName": "MetricsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client.get_job_execution_details", + "method": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetJobExecutionDetails", + "service": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", + "shortName": "MetricsV1Beta3" + }, + "shortName": "GetJobExecutionDetails" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetJobExecutionDetailsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetJobExecutionDetailsPager", + "shortName": "get_job_execution_details" + }, + "description": "Sample for GetJobExecutionDetails", + "file": 
"dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync_b7550163.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync_b7550163.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient", + "shortName": "MetricsV1Beta3AsyncClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient.get_job_metrics", + "method": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetJobMetrics", + "service": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", + "shortName": "MetricsV1Beta3" + }, + "shortName": "GetJobMetrics" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetJobMetricsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.JobMetrics", + "shortName": "get_job_metrics" + }, + "description": "Sample for GetJobMetrics", + "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + 
{ + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client", + "shortName": "MetricsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client.get_job_metrics", + "method": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetJobMetrics", + "service": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", + "shortName": "MetricsV1Beta3" + }, + "shortName": "GetJobMetrics" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetJobMetricsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.JobMetrics", + "shortName": "get_job_metrics" + }, + "description": "Sample for GetJobMetrics", + "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync_59dce217.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync_59dce217.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client", + "shortName": "MetricsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client.get_job_metrics", + "method": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetJobMetrics", + "service": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", + "shortName": "MetricsV1Beta3" + }, + "shortName": "GetJobMetrics" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetJobMetricsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.JobMetrics", + "shortName": "get_job_metrics" + }, + "description": "Sample for GetJobMetrics", + "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync_02835968.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync_02835968.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient", + "shortName": "MetricsV1Beta3AsyncClient" + }, + "fullName": 
"google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient.get_stage_execution_details", + "method": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetStageExecutionDetails", + "service": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", + "shortName": "MetricsV1Beta3" + }, + "shortName": "GetStageExecutionDetails" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetStageExecutionDetailsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetStageExecutionDetailsAsyncPager", + "shortName": "get_stage_execution_details" + }, + "description": "Sample for GetStageExecutionDetails", + "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client", + "shortName": "MetricsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client.get_stage_execution_details", + "method": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetStageExecutionDetails", + "service": { + 
"fullName": "google.dataflow.v1beta3.MetricsV1Beta3", + "shortName": "MetricsV1Beta3" + }, + "shortName": "GetStageExecutionDetails" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetStageExecutionDetailsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetStageExecutionDetailsPager", + "shortName": "get_stage_execution_details" + }, + "description": "Sample for GetStageExecutionDetails", + "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync_9490a11d.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync_9490a11d.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client", + "shortName": "MetricsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client.get_stage_execution_details", + "method": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetStageExecutionDetails", + "service": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", + "shortName": "MetricsV1Beta3" + }, + "shortName": "GetStageExecutionDetails" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.dataflow_v1beta3.types.GetStageExecutionDetailsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetStageExecutionDetailsPager", + "shortName": "get_stage_execution_details" + }, + "description": "Sample for GetStageExecutionDetails", + "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync_4acb3ded.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync_4acb3ded.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient", + "shortName": "SnapshotsV1Beta3AsyncClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient.delete_snapshot", + "method": { + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.DeleteSnapshot", + "service": { + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", + "shortName": "SnapshotsV1Beta3" + }, + "shortName": "DeleteSnapshot" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetJobExecutionDetailsRequest" + "type": "google.cloud.dataflow_v1beta3.types.DeleteSnapshotRequest" }, { "name": "retry", @@ -1422,68 +2793,68 @@ 
"type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetJobExecutionDetailsAsyncPager", - "shortName": "get_job_execution_details" + "resultType": "google.cloud.dataflow_v1beta3.types.DeleteSnapshotResponse", + "shortName": "delete_snapshot" }, - "description": "Sample for GetJobExecutionDetails", - "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_async.py", + "description": "Sample for DeleteSnapshot", + "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_async", + "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_async", "segments": [ { - "end": 44, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 41, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_async.py" + "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client", - "shortName": "MetricsV1Beta3Client" + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client", + "shortName": "SnapshotsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client.get_job_execution_details", + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client.delete_snapshot", "method": { - "fullName": 
"google.dataflow.v1beta3.MetricsV1Beta3.GetJobExecutionDetails", + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.DeleteSnapshot", "service": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", - "shortName": "MetricsV1Beta3" + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", + "shortName": "SnapshotsV1Beta3" }, - "shortName": "GetJobExecutionDetails" + "shortName": "DeleteSnapshot" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetJobExecutionDetailsRequest" + "type": "google.cloud.dataflow_v1beta3.types.DeleteSnapshotRequest" }, { "name": "retry", @@ -1498,69 +2869,68 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetJobExecutionDetailsPager", - "shortName": "get_job_execution_details" + "resultType": "google.cloud.dataflow_v1beta3.types.DeleteSnapshotResponse", + "shortName": "delete_snapshot" }, - "description": "Sample for GetJobExecutionDetails", - "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync.py", + "description": "Sample for DeleteSnapshot", + "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync_567e01e6.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_sync", + "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_sync", "segments": [ { - "end": 44, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 41, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": 
"dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync.py" + "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync_567e01e6.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient", - "shortName": "MetricsV1Beta3AsyncClient" + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client", + "shortName": "SnapshotsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient.get_job_metrics", + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client.delete_snapshot", "method": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetJobMetrics", + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.DeleteSnapshot", "service": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", - "shortName": "MetricsV1Beta3" + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", + "shortName": "SnapshotsV1Beta3" }, - "shortName": "GetJobMetrics" + "shortName": "DeleteSnapshot" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetJobMetricsRequest" + "type": "google.cloud.dataflow_v1beta3.types.DeleteSnapshotRequest" }, { "name": "retry", @@ -1575,68 +2945,69 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.JobMetrics", - "shortName": "get_job_metrics" + "resultType": "google.cloud.dataflow_v1beta3.types.DeleteSnapshotResponse", + "shortName": "delete_snapshot" }, - "description": "Sample for GetJobMetrics", - "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_async.py", + "description": "Sample for DeleteSnapshot", + "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync_34b1dfd7.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_async", + "regionTag": 
"dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_sync", "segments": [ { - "end": 43, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 44, - "start": 41, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_async.py" + "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync_34b1dfd7.py" }, { "canonical": true, "clientMethod": { + "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client", - "shortName": "MetricsV1Beta3Client" + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient", + "shortName": "SnapshotsV1Beta3AsyncClient" }, - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client.get_job_metrics", + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient.get_snapshot", "method": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetJobMetrics", + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.GetSnapshot", "service": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", - "shortName": "MetricsV1Beta3" + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", + "shortName": "SnapshotsV1Beta3" }, - "shortName": "GetJobMetrics" + "shortName": "GetSnapshot" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetJobMetricsRequest" + "type": "google.cloud.dataflow_v1beta3.types.GetSnapshotRequest" }, { "name": "retry", @@ -1651,69 +3022,68 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.JobMetrics", - "shortName": "get_job_metrics" + "resultType": 
"google.cloud.dataflow_v1beta3.types.Snapshot", + "shortName": "get_snapshot" }, - "description": "Sample for GetJobMetrics", - "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync.py", + "description": "Sample for GetSnapshot", + "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_sync", + "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_async", "segments": [ { - "end": 43, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 44, - "start": 41, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync.py" + "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_async.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient", - "shortName": "MetricsV1Beta3AsyncClient" + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client", + "shortName": "SnapshotsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient.get_stage_execution_details", + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client.get_snapshot", "method": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetStageExecutionDetails", + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.GetSnapshot", "service": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", - "shortName": "MetricsV1Beta3" + "fullName": 
"google.dataflow.v1beta3.SnapshotsV1Beta3", + "shortName": "SnapshotsV1Beta3" }, - "shortName": "GetStageExecutionDetails" + "shortName": "GetSnapshot" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetStageExecutionDetailsRequest" + "type": "google.cloud.dataflow_v1beta3.types.GetSnapshotRequest" }, { "name": "retry", @@ -1728,68 +3098,68 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetStageExecutionDetailsAsyncPager", - "shortName": "get_stage_execution_details" + "resultType": "google.cloud.dataflow_v1beta3.types.Snapshot", + "shortName": "get_snapshot" }, - "description": "Sample for GetStageExecutionDetails", - "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_async.py", + "description": "Sample for GetSnapshot", + "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync_ba1ca2b6.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_async", + "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_sync", "segments": [ { - "end": 44, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 41, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_async.py" + "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync_ba1ca2b6.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client", - 
"shortName": "MetricsV1Beta3Client" + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client", + "shortName": "SnapshotsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client.get_stage_execution_details", + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client.get_snapshot", "method": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetStageExecutionDetails", + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.GetSnapshot", "service": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", - "shortName": "MetricsV1Beta3" + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", + "shortName": "SnapshotsV1Beta3" }, - "shortName": "GetStageExecutionDetails" + "shortName": "GetSnapshot" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetStageExecutionDetailsRequest" + "type": "google.cloud.dataflow_v1beta3.types.GetSnapshotRequest" }, { "name": "retry", @@ -1804,47 +3174,47 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetStageExecutionDetailsPager", - "shortName": "get_stage_execution_details" + "resultType": "google.cloud.dataflow_v1beta3.types.Snapshot", + "shortName": "get_snapshot" }, - "description": "Sample for GetStageExecutionDetails", - "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync.py", + "description": "Sample for GetSnapshot", + "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync_846de01f.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_sync", + "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_sync", "segments": [ { - "end": 44, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": 
"CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 41, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync.py" + "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync_846de01f.py" }, { "canonical": true, @@ -1854,19 +3224,19 @@ "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient", "shortName": "SnapshotsV1Beta3AsyncClient" }, - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient.delete_snapshot", + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient.list_snapshots", "method": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.DeleteSnapshot", + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.ListSnapshots", "service": { "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", "shortName": "SnapshotsV1Beta3" }, - "shortName": "DeleteSnapshot" + "shortName": "ListSnapshots" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.DeleteSnapshotRequest" + "type": "google.cloud.dataflow_v1beta3.types.ListSnapshotsRequest" }, { "name": "retry", @@ -1881,47 +3251,47 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.DeleteSnapshotResponse", - "shortName": "delete_snapshot" + "resultType": "google.cloud.dataflow_v1beta3.types.ListSnapshotsResponse", + "shortName": "list_snapshots" }, - "description": "Sample for DeleteSnapshot", - "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_async.py", + "description": "Sample for ListSnapshots", + "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_async", + "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_async", "segments": [ { - "end": 43, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 44, - "start": 41, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_async.py" + "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_async.py" }, { "canonical": true, @@ -1930,19 +3300,19 @@ "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client", "shortName": "SnapshotsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client.delete_snapshot", + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client.list_snapshots", "method": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.DeleteSnapshot", + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.ListSnapshots", "service": { "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", "shortName": "SnapshotsV1Beta3" }, - "shortName": "DeleteSnapshot" + "shortName": "ListSnapshots" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.DeleteSnapshotRequest" + "type": "google.cloud.dataflow_v1beta3.types.ListSnapshotsRequest" }, { "name": "retry", @@ -1957,69 +3327,68 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.DeleteSnapshotResponse", - "shortName": "delete_snapshot" + "resultType": "google.cloud.dataflow_v1beta3.types.ListSnapshotsResponse", + "shortName": "list_snapshots" }, - "description": "Sample for 
DeleteSnapshot", - "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync.py", + "description": "Sample for ListSnapshots", + "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync_425e5024.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_sync", + "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_sync", "segments": [ { - "end": 43, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 44, - "start": 41, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync.py" + "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync_425e5024.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient", - "shortName": "SnapshotsV1Beta3AsyncClient" + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client", + "shortName": "SnapshotsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient.get_snapshot", + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client.list_snapshots", "method": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.GetSnapshot", + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.ListSnapshots", "service": { "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", "shortName": "SnapshotsV1Beta3" }, - "shortName": "GetSnapshot" + "shortName": "ListSnapshots" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.dataflow_v1beta3.types.GetSnapshotRequest" + "type": "google.cloud.dataflow_v1beta3.types.ListSnapshotsRequest" }, { "name": "retry", @@ -2034,68 +3403,69 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.Snapshot", - "shortName": "get_snapshot" + "resultType": "google.cloud.dataflow_v1beta3.types.ListSnapshotsResponse", + "shortName": "list_snapshots" }, - "description": "Sample for GetSnapshot", - "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_async.py", + "description": "Sample for ListSnapshots", + "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync_6da08cb9.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_async", + "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_sync", "segments": [ { - "end": 43, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 44, - "start": 41, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_async.py" + "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync_6da08cb9.py" }, { "canonical": true, "clientMethod": { + "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client", - "shortName": "SnapshotsV1Beta3Client" + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient", + "shortName": "TemplatesServiceAsyncClient" }, - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client.get_snapshot", + "fullName": 
"google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient.create_job_from_template", "method": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.GetSnapshot", + "fullName": "google.dataflow.v1beta3.TemplatesService.CreateJobFromTemplate", "service": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", - "shortName": "SnapshotsV1Beta3" + "fullName": "google.dataflow.v1beta3.TemplatesService", + "shortName": "TemplatesService" }, - "shortName": "GetSnapshot" + "shortName": "CreateJobFromTemplate" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetSnapshotRequest" + "type": "google.cloud.dataflow_v1beta3.types.CreateJobFromTemplateRequest" }, { "name": "retry", @@ -2110,69 +3480,68 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.Snapshot", - "shortName": "get_snapshot" + "resultType": "google.cloud.dataflow_v1beta3.types.Job", + "shortName": "create_job_from_template" }, - "description": "Sample for GetSnapshot", - "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync.py", + "description": "Sample for CreateJobFromTemplate", + "file": "dataflow_v1beta3_generated_templates_service_create_job_from_template_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_sync", + "regionTag": "dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_async", "segments": [ { - "end": 43, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 44, - "start": 41, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": 
"dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync.py" + "title": "dataflow_v1beta3_generated_templates_service_create_job_from_template_async.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient", - "shortName": "SnapshotsV1Beta3AsyncClient" + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient", + "shortName": "TemplatesServiceClient" }, - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient.list_snapshots", + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient.create_job_from_template", "method": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.ListSnapshots", + "fullName": "google.dataflow.v1beta3.TemplatesService.CreateJobFromTemplate", "service": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", - "shortName": "SnapshotsV1Beta3" + "fullName": "google.dataflow.v1beta3.TemplatesService", + "shortName": "TemplatesService" }, - "shortName": "ListSnapshots" + "shortName": "CreateJobFromTemplate" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.ListSnapshotsRequest" + "type": "google.cloud.dataflow_v1beta3.types.CreateJobFromTemplateRequest" }, { "name": "retry", @@ -2187,68 +3556,68 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.ListSnapshotsResponse", - "shortName": "list_snapshots" + "resultType": "google.cloud.dataflow_v1beta3.types.Job", + "shortName": "create_job_from_template" }, - "description": "Sample for ListSnapshots", - "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_async.py", + "description": "Sample for CreateJobFromTemplate", + "file": "dataflow_v1beta3_generated_templates_service_create_job_from_template_sync_9e26a4bf.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_async", + "regionTag": 
"dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_sync", "segments": [ { - "end": 43, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 44, - "start": 41, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_async.py" + "title": "dataflow_v1beta3_generated_templates_service_create_job_from_template_sync_9e26a4bf.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client", - "shortName": "SnapshotsV1Beta3Client" + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient", + "shortName": "TemplatesServiceClient" }, - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client.list_snapshots", + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient.create_job_from_template", "method": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.ListSnapshots", + "fullName": "google.dataflow.v1beta3.TemplatesService.CreateJobFromTemplate", "service": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", - "shortName": "SnapshotsV1Beta3" + "fullName": "google.dataflow.v1beta3.TemplatesService", + "shortName": "TemplatesService" }, - "shortName": "ListSnapshots" + "shortName": "CreateJobFromTemplate" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.ListSnapshotsRequest" + "type": "google.cloud.dataflow_v1beta3.types.CreateJobFromTemplateRequest" }, { "name": "retry", @@ -2263,47 +3632,47 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.ListSnapshotsResponse", - 
"shortName": "list_snapshots" + "resultType": "google.cloud.dataflow_v1beta3.types.Job", + "shortName": "create_job_from_template" }, - "description": "Sample for ListSnapshots", - "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync.py", + "description": "Sample for CreateJobFromTemplate", + "file": "dataflow_v1beta3_generated_templates_service_create_job_from_template_sync_e2cb482f.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_sync", + "regionTag": "dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_sync", "segments": [ { - "end": 43, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 44, - "start": 41, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync.py" + "title": "dataflow_v1beta3_generated_templates_service_create_job_from_template_sync_e2cb482f.py" }, { "canonical": true, @@ -2313,19 +3682,19 @@ "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient", "shortName": "TemplatesServiceAsyncClient" }, - "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient.create_job_from_template", + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient.get_template", "method": { - "fullName": "google.dataflow.v1beta3.TemplatesService.CreateJobFromTemplate", + "fullName": "google.dataflow.v1beta3.TemplatesService.GetTemplate", "service": { "fullName": "google.dataflow.v1beta3.TemplatesService", "shortName": "TemplatesService" }, - "shortName": "CreateJobFromTemplate" + "shortName": 
"GetTemplate" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.CreateJobFromTemplateRequest" + "type": "google.cloud.dataflow_v1beta3.types.GetTemplateRequest" }, { "name": "retry", @@ -2340,47 +3709,47 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.Job", - "shortName": "create_job_from_template" + "resultType": "google.cloud.dataflow_v1beta3.types.GetTemplateResponse", + "shortName": "get_template" }, - "description": "Sample for CreateJobFromTemplate", - "file": "dataflow_v1beta3_generated_templates_service_create_job_from_template_async.py", + "description": "Sample for GetTemplate", + "file": "dataflow_v1beta3_generated_templates_service_get_template_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_async", + "regionTag": "dataflow_v1beta3_generated_TemplatesService_GetTemplate_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_templates_service_create_job_from_template_async.py" + "title": "dataflow_v1beta3_generated_templates_service_get_template_async.py" }, { "canonical": true, @@ -2389,19 +3758,19 @@ "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient", "shortName": "TemplatesServiceClient" }, - "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient.create_job_from_template", + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient.get_template", "method": { 
- "fullName": "google.dataflow.v1beta3.TemplatesService.CreateJobFromTemplate", + "fullName": "google.dataflow.v1beta3.TemplatesService.GetTemplate", "service": { "fullName": "google.dataflow.v1beta3.TemplatesService", "shortName": "TemplatesService" }, - "shortName": "CreateJobFromTemplate" + "shortName": "GetTemplate" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.CreateJobFromTemplateRequest" + "type": "google.cloud.dataflow_v1beta3.types.GetTemplateRequest" }, { "name": "retry", @@ -2416,57 +3785,56 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.Job", - "shortName": "create_job_from_template" + "resultType": "google.cloud.dataflow_v1beta3.types.GetTemplateResponse", + "shortName": "get_template" }, - "description": "Sample for CreateJobFromTemplate", - "file": "dataflow_v1beta3_generated_templates_service_create_job_from_template_sync.py", + "description": "Sample for GetTemplate", + "file": "dataflow_v1beta3_generated_templates_service_get_template_sync_732e6209.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_sync", + "regionTag": "dataflow_v1beta3_generated_TemplatesService_GetTemplate_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_templates_service_create_job_from_template_sync.py" + "title": "dataflow_v1beta3_generated_templates_service_get_template_sync_732e6209.py" }, { "canonical": true, 
"clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient", - "shortName": "TemplatesServiceAsyncClient" + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient", + "shortName": "TemplatesServiceClient" }, - "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient.get_template", + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient.get_template", "method": { "fullName": "google.dataflow.v1beta3.TemplatesService.GetTemplate", "service": { @@ -2497,64 +3865,65 @@ "shortName": "get_template" }, "description": "Sample for GetTemplate", - "file": "dataflow_v1beta3_generated_templates_service_get_template_async.py", + "file": "dataflow_v1beta3_generated_templates_service_get_template_sync_07cd261a.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_TemplatesService_GetTemplate_async", + "regionTag": "dataflow_v1beta3_generated_TemplatesService_GetTemplate_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_templates_service_get_template_async.py" + "title": "dataflow_v1beta3_generated_templates_service_get_template_sync_07cd261a.py" }, { "canonical": true, "clientMethod": { + "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient", - "shortName": "TemplatesServiceClient" + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient", + "shortName": "TemplatesServiceAsyncClient" }, - 
"fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient.get_template", + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient.launch_template", "method": { - "fullName": "google.dataflow.v1beta3.TemplatesService.GetTemplate", + "fullName": "google.dataflow.v1beta3.TemplatesService.LaunchTemplate", "service": { "fullName": "google.dataflow.v1beta3.TemplatesService", "shortName": "TemplatesService" }, - "shortName": "GetTemplate" + "shortName": "LaunchTemplate" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetTemplateRequest" + "type": "google.cloud.dataflow_v1beta3.types.LaunchTemplateRequest" }, { "name": "retry", @@ -2569,57 +3938,56 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.GetTemplateResponse", - "shortName": "get_template" + "resultType": "google.cloud.dataflow_v1beta3.types.LaunchTemplateResponse", + "shortName": "launch_template" }, - "description": "Sample for GetTemplate", - "file": "dataflow_v1beta3_generated_templates_service_get_template_sync.py", + "description": "Sample for LaunchTemplate", + "file": "dataflow_v1beta3_generated_templates_service_launch_template_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_TemplatesService_GetTemplate_sync", + "regionTag": "dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": 
"dataflow_v1beta3_generated_templates_service_get_template_sync.py" + "title": "dataflow_v1beta3_generated_templates_service_launch_template_async.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient", - "shortName": "TemplatesServiceAsyncClient" + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient", + "shortName": "TemplatesServiceClient" }, - "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient.launch_template", + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient.launch_template", "method": { "fullName": "google.dataflow.v1beta3.TemplatesService.LaunchTemplate", "service": { @@ -2650,43 +4018,43 @@ "shortName": "launch_template" }, "description": "Sample for LaunchTemplate", - "file": "dataflow_v1beta3_generated_templates_service_launch_template_async.py", + "file": "dataflow_v1beta3_generated_templates_service_launch_template_sync_77764eb9.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_async", + "regionTag": "dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_templates_service_launch_template_async.py" + "title": "dataflow_v1beta3_generated_templates_service_launch_template_sync_77764eb9.py" }, { "canonical": true, @@ -2726,43 +4094,43 @@ "shortName": "launch_template" }, "description": "Sample 
for LaunchTemplate", - "file": "dataflow_v1beta3_generated_templates_service_launch_template_sync.py", + "file": "dataflow_v1beta3_generated_templates_service_launch_template_sync_140179ca.py", "language": "PYTHON", "origin": "API_DEFINITION", "regionTag": "dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_templates_service_launch_template_sync.py" + "title": "dataflow_v1beta3_generated_templates_service_launch_template_sync_140179ca.py" } ] } diff --git a/setup.py b/setup.py index a89c058..09af4d4 100644 --- a/setup.py +++ b/setup.py @@ -21,13 +21,13 @@ name = "google-cloud-dataflow-client" description = "Cloud Dataflow API client library" -version = "0.5.5" +version = "0.6.0" release_status = "Development Status :: 4 - Beta" url = "https://github.com/googleapis/python-dataflow-client" dependencies = [ - "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", + "google-api-core[grpc] >= 1.33.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", "proto-plus >= 1.22.0, <2.0.0dev", - "protobuf >= 3.19.0, <5.0.0dev", + "protobuf >= 3.20.1, <5.0.0dev", ] package_root = os.path.abspath(os.path.dirname(__file__)) diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index 810c7cb..d09d847 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -4,6 +4,6 @@ # Pin the version to the lower bound. 
# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.32.0 +google-api-core==1.33.0 proto-plus==1.22.0 -protobuf==3.19.0 +protobuf==3.20.1 diff --git a/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py b/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py index 9814c52..55bfd8a 100644 --- a/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py +++ b/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py @@ -18,10 +18,12 @@ # try/except added for compatibility with python < 3.8 try: from unittest import mock - from unittest.mock import AsyncMock -except ImportError: + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template @@ -31,10 +33,14 @@ from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account +from google.protobuf import json_format import grpc from grpc.experimental import aio +from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.cloud.dataflow_v1beta3.services.flex_templates_service import ( FlexTemplatesServiceAsyncClient, @@ -94,6 +100,7 @@ def test__get_default_mtls_endpoint(): [ (FlexTemplatesServiceClient, "grpc"), (FlexTemplatesServiceAsyncClient, "grpc_asyncio"), + (FlexTemplatesServiceClient, "rest"), ], ) def test_flex_templates_service_client_from_service_account_info( @@ -109,7 +116,11 @@ def test_flex_templates_service_client_from_service_account_info( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert 
client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) @pytest.mark.parametrize( @@ -117,6 +128,7 @@ def test_flex_templates_service_client_from_service_account_info( [ (transports.FlexTemplatesServiceGrpcTransport, "grpc"), (transports.FlexTemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.FlexTemplatesServiceRestTransport, "rest"), ], ) def test_flex_templates_service_client_service_account_always_use_jwt( @@ -142,6 +154,7 @@ def test_flex_templates_service_client_service_account_always_use_jwt( [ (FlexTemplatesServiceClient, "grpc"), (FlexTemplatesServiceAsyncClient, "grpc_asyncio"), + (FlexTemplatesServiceClient, "rest"), ], ) def test_flex_templates_service_client_from_service_account_file( @@ -164,13 +177,18 @@ def test_flex_templates_service_client_from_service_account_file( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) def test_flex_templates_service_client_get_transport_class(): transport = FlexTemplatesServiceClient.get_transport_class() available_transports = [ transports.FlexTemplatesServiceGrpcTransport, + transports.FlexTemplatesServiceRestTransport, ] assert transport in available_transports @@ -191,6 +209,11 @@ def test_flex_templates_service_client_get_transport_class(): transports.FlexTemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + ( + FlexTemplatesServiceClient, + transports.FlexTemplatesServiceRestTransport, + "rest", + ), ], ) @mock.patch.object( @@ -346,6 +369,18 @@ def test_flex_templates_service_client_client_options( "grpc_asyncio", "false", ), + ( + FlexTemplatesServiceClient, + 
transports.FlexTemplatesServiceRestTransport, + "rest", + "true", + ), + ( + FlexTemplatesServiceClient, + transports.FlexTemplatesServiceRestTransport, + "rest", + "false", + ), ], ) @mock.patch.object( @@ -549,6 +584,11 @@ def test_flex_templates_service_client_get_mtls_endpoint_and_cert_source(client_ transports.FlexTemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + ( + FlexTemplatesServiceClient, + transports.FlexTemplatesServiceRestTransport, + "rest", + ), ], ) def test_flex_templates_service_client_client_options_scopes( @@ -589,6 +629,12 @@ def test_flex_templates_service_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + ( + FlexTemplatesServiceClient, + transports.FlexTemplatesServiceRestTransport, + "rest", + None, + ), ], ) def test_flex_templates_service_client_client_options_credentials_file( @@ -862,6 +908,129 @@ async def test_launch_flex_template_field_headers_async(): ) in kw["metadata"] +@pytest.mark.parametrize( + "request_type", + [ + templates.LaunchFlexTemplateRequest, + dict, + ], +) +def test_launch_flex_template_rest(request_type): + client = FlexTemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = templates.LaunchFlexTemplateResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = templates.LaunchFlexTemplateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.launch_flex_template(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, templates.LaunchFlexTemplateResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_launch_flex_template_rest_interceptors(null_interceptor): + transport = transports.FlexTemplatesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FlexTemplatesServiceRestInterceptor(), + ) + client = FlexTemplatesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FlexTemplatesServiceRestInterceptor, "post_launch_flex_template" + ) as post, mock.patch.object( + transports.FlexTemplatesServiceRestInterceptor, "pre_launch_flex_template" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = templates.LaunchFlexTemplateRequest.pb( + templates.LaunchFlexTemplateRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = templates.LaunchFlexTemplateResponse.to_json( + templates.LaunchFlexTemplateResponse() + ) + + request = templates.LaunchFlexTemplateRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + 
pre.return_value = request, metadata + post.return_value = templates.LaunchFlexTemplateResponse() + + client.launch_flex_template( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_launch_flex_template_rest_bad_request( + transport: str = "rest", request_type=templates.LaunchFlexTemplateRequest +): + client = FlexTemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.launch_flex_template(request) + + +def test_launch_flex_template_rest_error(): + client = FlexTemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.FlexTemplatesServiceGrpcTransport( @@ -943,6 +1112,7 @@ def test_transport_get_channel(): [ transports.FlexTemplatesServiceGrpcTransport, transports.FlexTemplatesServiceGrpcAsyncIOTransport, + transports.FlexTemplatesServiceRestTransport, ], ) def test_transport_adc(transport_class): @@ -957,6 +1127,7 @@ def test_transport_adc(transport_class): "transport_name", [ "grpc", + "rest", ], ) def test_transport_kind(transport_name): @@ -1099,6 +1270,7 @@ def test_flex_templates_service_transport_auth_adc(transport_class): [ transports.FlexTemplatesServiceGrpcTransport, transports.FlexTemplatesServiceGrpcAsyncIOTransport, + transports.FlexTemplatesServiceRestTransport, ], ) def test_flex_templates_service_transport_auth_gdch_credentials(transport_class): @@ -1203,11 +1375,23 @@ def test_flex_templates_service_grpc_transport_client_cert_source_for_mtls( ) +def test_flex_templates_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.FlexTemplatesServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_flex_templates_service_host_no_port(transport_name): @@ -1218,7 +1402,11 @@ def test_flex_templates_service_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) @pytest.mark.parametrize( @@ -1226,6 +1414,7 @@ def test_flex_templates_service_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) 
def test_flex_templates_service_host_with_port(transport_name): @@ -1236,7 +1425,33 @@ def test_flex_templates_service_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("dataflow.googleapis.com:8000") + assert client.transport._host == ( + "dataflow.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_flex_templates_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = FlexTemplatesServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = FlexTemplatesServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.launch_flex_template._session + session2 = client2.transport.launch_flex_template._session + assert session1 != session2 def test_flex_templates_service_grpc_transport_channel(): @@ -1507,6 +1722,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -1524,6 +1740,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: diff --git a/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py b/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py index 57d3f1e..b10585a 100644 --- a/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py +++ b/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py @@ -18,10 +18,12 @@ # try/except added for compatibility with python < 3.8 try: from unittest import mock - from unittest.mock import AsyncMock -except ImportError: + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math from 
google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template @@ -33,12 +35,16 @@ from google.oauth2 import service_account from google.protobuf import any_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import json_format from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore import grpc from grpc.experimental import aio +from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3 import ( JobsV1Beta3AsyncClient, @@ -95,6 +101,7 @@ def test__get_default_mtls_endpoint(): [ (JobsV1Beta3Client, "grpc"), (JobsV1Beta3AsyncClient, "grpc_asyncio"), + (JobsV1Beta3Client, "rest"), ], ) def test_jobs_v1_beta3_client_from_service_account_info(client_class, transport_name): @@ -108,7 +115,11 @@ def test_jobs_v1_beta3_client_from_service_account_info(client_class, transport_ assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) @pytest.mark.parametrize( @@ -116,6 +127,7 @@ def test_jobs_v1_beta3_client_from_service_account_info(client_class, transport_ [ (transports.JobsV1Beta3GrpcTransport, "grpc"), (transports.JobsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"), + (transports.JobsV1Beta3RestTransport, "rest"), ], ) def test_jobs_v1_beta3_client_service_account_always_use_jwt( @@ -141,6 +153,7 @@ def test_jobs_v1_beta3_client_service_account_always_use_jwt( [ (JobsV1Beta3Client, "grpc"), (JobsV1Beta3AsyncClient, "grpc_asyncio"), + (JobsV1Beta3Client, "rest"), ], ) def 
test_jobs_v1_beta3_client_from_service_account_file(client_class, transport_name): @@ -161,13 +174,18 @@ def test_jobs_v1_beta3_client_from_service_account_file(client_class, transport_ assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) def test_jobs_v1_beta3_client_get_transport_class(): transport = JobsV1Beta3Client.get_transport_class() available_transports = [ transports.JobsV1Beta3GrpcTransport, + transports.JobsV1Beta3RestTransport, ] assert transport in available_transports @@ -184,6 +202,7 @@ def test_jobs_v1_beta3_client_get_transport_class(): transports.JobsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", ), + (JobsV1Beta3Client, transports.JobsV1Beta3RestTransport, "rest"), ], ) @mock.patch.object( @@ -327,6 +346,8 @@ def test_jobs_v1_beta3_client_client_options( "grpc_asyncio", "false", ), + (JobsV1Beta3Client, transports.JobsV1Beta3RestTransport, "rest", "true"), + (JobsV1Beta3Client, transports.JobsV1Beta3RestTransport, "rest", "false"), ], ) @mock.patch.object( @@ -520,6 +541,7 @@ def test_jobs_v1_beta3_client_get_mtls_endpoint_and_cert_source(client_class): transports.JobsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", ), + (JobsV1Beta3Client, transports.JobsV1Beta3RestTransport, "rest"), ], ) def test_jobs_v1_beta3_client_client_options_scopes( @@ -555,6 +577,7 @@ def test_jobs_v1_beta3_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + (JobsV1Beta3Client, transports.JobsV1Beta3RestTransport, "rest", None), ], ) def test_jobs_v1_beta3_client_client_options_credentials_file( @@ -2247,139 +2270,1997 @@ async def test_snapshot_job_field_headers_async(): ) in kw["metadata"] -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
- transport = transports.JobsV1Beta3GrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + jobs.CreateJobRequest, + dict, + ], +) +def test_create_job_rest(request_type): + client = JobsV1Beta3Client( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2"} + request_init["job"] = { + "id": "id_value", + "project_id": "project_id_value", + "name": "name_value", + "type_": 1, + "environment": { + "temp_storage_prefix": "temp_storage_prefix_value", + "cluster_manager_api_service": "cluster_manager_api_service_value", + "experiments": ["experiments_value1", "experiments_value2"], + "service_options": ["service_options_value1", "service_options_value2"], + "service_kms_key_name": "service_kms_key_name_value", + "worker_pools": [ + { + "kind": "kind_value", + "num_workers": 1212, + "packages": [{"name": "name_value", "location": "location_value"}], + "default_package_set": 1, + "machine_type": "machine_type_value", + "teardown_policy": 1, + "disk_size_gb": 1261, + "disk_type": "disk_type_value", + "disk_source_image": "disk_source_image_value", + "zone": "zone_value", + "taskrunner_settings": { + "task_user": "task_user_value", + "task_group": "task_group_value", + "oauth_scopes": ["oauth_scopes_value1", "oauth_scopes_value2"], + "base_url": "base_url_value", + "dataflow_api_version": "dataflow_api_version_value", + "parallel_worker_settings": { + "base_url": "base_url_value", + "reporting_enabled": True, + "service_path": "service_path_value", + "shuffle_service_path": "shuffle_service_path_value", + "worker_id": "worker_id_value", + "temp_storage_prefix": "temp_storage_prefix_value", + }, + "base_task_dir": "base_task_dir_value", + "continue_on_exception": True, + "log_to_serialconsole": 
True, + "alsologtostderr": True, + "log_upload_location": "log_upload_location_value", + "log_dir": "log_dir_value", + "temp_storage_prefix": "temp_storage_prefix_value", + "harness_command": "harness_command_value", + "workflow_file_name": "workflow_file_name_value", + "commandlines_file_name": "commandlines_file_name_value", + "vm_id": "vm_id_value", + "language_hint": "language_hint_value", + "streaming_worker_main_class": "streaming_worker_main_class_value", + }, + "on_host_maintenance": "on_host_maintenance_value", + "data_disks": [ + { + "size_gb": 739, + "disk_type": "disk_type_value", + "mount_point": "mount_point_value", + } + ], + "metadata": {}, + "autoscaling_settings": {"algorithm": 1, "max_num_workers": 1633}, + "pool_args": { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + }, + "network": "network_value", + "subnetwork": "subnetwork_value", + "worker_harness_container_image": "worker_harness_container_image_value", + "num_threads_per_worker": 2361, + "ip_configuration": 1, + "sdk_harness_container_images": [ + { + "container_image": "container_image_value", + "use_single_core_per_container": True, + "environment_id": "environment_id_value", + "capabilities": [ + "capabilities_value1", + "capabilities_value2", + ], + } + ], + } + ], + "user_agent": {"fields": {}}, + "version": {}, + "dataset": "dataset_value", + "sdk_pipeline_options": {}, + "internal_experiments": {}, + "service_account_email": "service_account_email_value", + "flex_resource_scheduling_goal": 1, + "worker_region": "worker_region_value", + "worker_zone": "worker_zone_value", + "shuffle_mode": 1, + "debug_options": {"enable_hot_key_logging": True}, + }, + "steps": [{"kind": "kind_value", "name": "name_value", "properties": {}}], + "steps_location": "steps_location_value", + "current_state": 1, + "current_state_time": {"seconds": 751, "nanos": 543}, + "requested_state": 1, + "execution_info": {"stages": {}}, + "create_time": {}, + 
"replace_job_id": "replace_job_id_value", + "transform_name_mapping": {}, + "client_request_id": "client_request_id_value", + "replaced_by_job_id": "replaced_by_job_id_value", + "temp_files": ["temp_files_value1", "temp_files_value2"], + "labels": {}, + "location": "location_value", + "pipeline_description": { + "original_pipeline_transform": [ + { + "kind": 1, + "id": "id_value", + "name": "name_value", + "display_data": [ + { + "key": "key_value", + "namespace": "namespace_value", + "str_value": "str_value_value", + "int64_value": 1073, + "float_value": 0.117, + "java_class_value": "java_class_value_value", + "timestamp_value": {}, + "duration_value": {"seconds": 751, "nanos": 543}, + "bool_value": True, + "short_str_value": "short_str_value_value", + "url": "url_value", + "label": "label_value", + } + ], + "output_collection_name": [ + "output_collection_name_value1", + "output_collection_name_value2", + ], + "input_collection_name": [ + "input_collection_name_value1", + "input_collection_name_value2", + ], + } + ], + "execution_pipeline_stage": [ + { + "name": "name_value", + "id": "id_value", + "kind": 1, + "input_source": [ + { + "user_name": "user_name_value", + "name": "name_value", + "original_transform_or_collection": "original_transform_or_collection_value", + "size_bytes": 1089, + } + ], + "output_source": {}, + "prerequisite_stage": [ + "prerequisite_stage_value1", + "prerequisite_stage_value2", + ], + "component_transform": [ + { + "user_name": "user_name_value", + "name": "name_value", + "original_transform": "original_transform_value", + } + ], + "component_source": [ + { + "user_name": "user_name_value", + "name": "name_value", + "original_transform_or_collection": "original_transform_or_collection_value", + } + ], + } + ], + "display_data": {}, + }, + "stage_states": [ + { + "execution_stage_name": "execution_stage_name_value", + "execution_stage_state": 1, + "current_state_time": {}, + } + ], + "job_metadata": { + "sdk_version": { + "version": 
"version_value", + "version_display_name": "version_display_name_value", + "sdk_support_status": 1, + }, + "spanner_details": [ + { + "project_id": "project_id_value", + "instance_id": "instance_id_value", + "database_id": "database_id_value", + } + ], + "bigquery_details": [ + { + "table": "table_value", + "dataset": "dataset_value", + "project_id": "project_id_value", + "query": "query_value", + } + ], + "big_table_details": [ + { + "project_id": "project_id_value", + "instance_id": "instance_id_value", + "table_id": "table_id_value", + } + ], + "pubsub_details": [ + {"topic": "topic_value", "subscription": "subscription_value"} + ], + "file_details": [{"file_pattern": "file_pattern_value"}], + "datastore_details": [ + {"namespace": "namespace_value", "project_id": "project_id_value"} + ], + }, + "start_time": {}, + "created_from_snapshot_id": "created_from_snapshot_id_value", + "satisfies_pzs": True, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = jobs.Job( + id="id_value", + project_id="project_id_value", + name="name_value", + type_=environment.JobType.JOB_TYPE_BATCH, + steps_location="steps_location_value", + current_state=jobs.JobState.JOB_STATE_STOPPED, + requested_state=jobs.JobState.JOB_STATE_STOPPED, + replace_job_id="replace_job_id_value", + client_request_id="client_request_id_value", + replaced_by_job_id="replaced_by_job_id_value", + temp_files=["temp_files_value"], + location="location_value", + created_from_snapshot_id="created_from_snapshot_id_value", + satisfies_pzs=True, ) - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.JobsV1Beta3GrpcTransport( + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = jobs.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_job(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, jobs.Job) + assert response.id == "id_value" + assert response.project_id == "project_id_value" + assert response.name == "name_value" + assert response.type_ == environment.JobType.JOB_TYPE_BATCH + assert response.steps_location == "steps_location_value" + assert response.current_state == jobs.JobState.JOB_STATE_STOPPED + assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED + assert response.replace_job_id == "replace_job_id_value" + assert response.client_request_id == "client_request_id_value" + assert response.replaced_by_job_id == "replaced_by_job_id_value" + assert response.temp_files == ["temp_files_value"] + assert response.location == "location_value" + assert response.created_from_snapshot_id == "created_from_snapshot_id_value" + assert response.satisfies_pzs is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_job_rest_interceptors(null_interceptor): + transport = transports.JobsV1Beta3RestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.JobsV1Beta3RestInterceptor(), ) - with pytest.raises(ValueError): - client = JobsV1Beta3Client( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + client = JobsV1Beta3Client(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
transports.JobsV1Beta3RestInterceptor, "post_create_job" + ) as post, mock.patch.object( + transports.JobsV1Beta3RestInterceptor, "pre_create_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = jobs.CreateJobRequest.pb(jobs.CreateJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = jobs.Job.to_json(jobs.Job()) + + request = jobs.CreateJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = jobs.Job() + + client.create_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # It is an error to provide an api_key and a transport instance. - transport = transports.JobsV1Beta3GrpcTransport( + pre.assert_called_once() + post.assert_called_once() + + +def test_create_job_rest_bad_request( + transport: str = "rest", request_type=jobs.CreateJobRequest +): + client = JobsV1Beta3Client( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = JobsV1Beta3Client( - client_options=options, - transport=transport, - ) - # It is an error to provide an api_key and a credential. 
- options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = JobsV1Beta3Client( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2"} + request_init["job"] = { + "id": "id_value", + "project_id": "project_id_value", + "name": "name_value", + "type_": 1, + "environment": { + "temp_storage_prefix": "temp_storage_prefix_value", + "cluster_manager_api_service": "cluster_manager_api_service_value", + "experiments": ["experiments_value1", "experiments_value2"], + "service_options": ["service_options_value1", "service_options_value2"], + "service_kms_key_name": "service_kms_key_name_value", + "worker_pools": [ + { + "kind": "kind_value", + "num_workers": 1212, + "packages": [{"name": "name_value", "location": "location_value"}], + "default_package_set": 1, + "machine_type": "machine_type_value", + "teardown_policy": 1, + "disk_size_gb": 1261, + "disk_type": "disk_type_value", + "disk_source_image": "disk_source_image_value", + "zone": "zone_value", + "taskrunner_settings": { + "task_user": "task_user_value", + "task_group": "task_group_value", + "oauth_scopes": ["oauth_scopes_value1", "oauth_scopes_value2"], + "base_url": "base_url_value", + "dataflow_api_version": "dataflow_api_version_value", + "parallel_worker_settings": { + "base_url": "base_url_value", + "reporting_enabled": True, + "service_path": "service_path_value", + "shuffle_service_path": "shuffle_service_path_value", + "worker_id": "worker_id_value", + "temp_storage_prefix": "temp_storage_prefix_value", + }, + "base_task_dir": "base_task_dir_value", + "continue_on_exception": True, + "log_to_serialconsole": True, + "alsologtostderr": True, + "log_upload_location": "log_upload_location_value", + "log_dir": "log_dir_value", + "temp_storage_prefix": "temp_storage_prefix_value", + "harness_command": "harness_command_value", + 
"workflow_file_name": "workflow_file_name_value", + "commandlines_file_name": "commandlines_file_name_value", + "vm_id": "vm_id_value", + "language_hint": "language_hint_value", + "streaming_worker_main_class": "streaming_worker_main_class_value", + }, + "on_host_maintenance": "on_host_maintenance_value", + "data_disks": [ + { + "size_gb": 739, + "disk_type": "disk_type_value", + "mount_point": "mount_point_value", + } + ], + "metadata": {}, + "autoscaling_settings": {"algorithm": 1, "max_num_workers": 1633}, + "pool_args": { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + }, + "network": "network_value", + "subnetwork": "subnetwork_value", + "worker_harness_container_image": "worker_harness_container_image_value", + "num_threads_per_worker": 2361, + "ip_configuration": 1, + "sdk_harness_container_images": [ + { + "container_image": "container_image_value", + "use_single_core_per_container": True, + "environment_id": "environment_id_value", + "capabilities": [ + "capabilities_value1", + "capabilities_value2", + ], + } + ], + } + ], + "user_agent": {"fields": {}}, + "version": {}, + "dataset": "dataset_value", + "sdk_pipeline_options": {}, + "internal_experiments": {}, + "service_account_email": "service_account_email_value", + "flex_resource_scheduling_goal": 1, + "worker_region": "worker_region_value", + "worker_zone": "worker_zone_value", + "shuffle_mode": 1, + "debug_options": {"enable_hot_key_logging": True}, + }, + "steps": [{"kind": "kind_value", "name": "name_value", "properties": {}}], + "steps_location": "steps_location_value", + "current_state": 1, + "current_state_time": {"seconds": 751, "nanos": 543}, + "requested_state": 1, + "execution_info": {"stages": {}}, + "create_time": {}, + "replace_job_id": "replace_job_id_value", + "transform_name_mapping": {}, + "client_request_id": "client_request_id_value", + "replaced_by_job_id": "replaced_by_job_id_value", + "temp_files": ["temp_files_value1", 
"temp_files_value2"], + "labels": {}, + "location": "location_value", + "pipeline_description": { + "original_pipeline_transform": [ + { + "kind": 1, + "id": "id_value", + "name": "name_value", + "display_data": [ + { + "key": "key_value", + "namespace": "namespace_value", + "str_value": "str_value_value", + "int64_value": 1073, + "float_value": 0.117, + "java_class_value": "java_class_value_value", + "timestamp_value": {}, + "duration_value": {"seconds": 751, "nanos": 543}, + "bool_value": True, + "short_str_value": "short_str_value_value", + "url": "url_value", + "label": "label_value", + } + ], + "output_collection_name": [ + "output_collection_name_value1", + "output_collection_name_value2", + ], + "input_collection_name": [ + "input_collection_name_value1", + "input_collection_name_value2", + ], + } + ], + "execution_pipeline_stage": [ + { + "name": "name_value", + "id": "id_value", + "kind": 1, + "input_source": [ + { + "user_name": "user_name_value", + "name": "name_value", + "original_transform_or_collection": "original_transform_or_collection_value", + "size_bytes": 1089, + } + ], + "output_source": {}, + "prerequisite_stage": [ + "prerequisite_stage_value1", + "prerequisite_stage_value2", + ], + "component_transform": [ + { + "user_name": "user_name_value", + "name": "name_value", + "original_transform": "original_transform_value", + } + ], + "component_source": [ + { + "user_name": "user_name_value", + "name": "name_value", + "original_transform_or_collection": "original_transform_or_collection_value", + } + ], + } + ], + "display_data": {}, + }, + "stage_states": [ + { + "execution_stage_name": "execution_stage_name_value", + "execution_stage_state": 1, + "current_state_time": {}, + } + ], + "job_metadata": { + "sdk_version": { + "version": "version_value", + "version_display_name": "version_display_name_value", + "sdk_support_status": 1, + }, + "spanner_details": [ + { + "project_id": "project_id_value", + "instance_id": "instance_id_value", + 
"database_id": "database_id_value", + } + ], + "bigquery_details": [ + { + "table": "table_value", + "dataset": "dataset_value", + "project_id": "project_id_value", + "query": "query_value", + } + ], + "big_table_details": [ + { + "project_id": "project_id_value", + "instance_id": "instance_id_value", + "table_id": "table_id_value", + } + ], + "pubsub_details": [ + {"topic": "topic_value", "subscription": "subscription_value"} + ], + "file_details": [{"file_pattern": "file_pattern_value"}], + "datastore_details": [ + {"namespace": "namespace_value", "project_id": "project_id_value"} + ], + }, + "start_time": {}, + "created_from_snapshot_id": "created_from_snapshot_id_value", + "satisfies_pzs": True, + } + request = request_type(**request_init) - # It is an error to provide scopes and a transport instance. - transport = transports.JobsV1Beta3GrpcTransport( + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_job(request) + + +def test_create_job_rest_error(): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + jobs.GetJobRequest, + dict, + ], +) +def test_get_job_rest(request_type): + client = JobsV1Beta3Client( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = JobsV1Beta3Client( - client_options={"scopes": ["1", "2"]}, - transport=transport, + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2", "job_id": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and 
fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = jobs.Job( + id="id_value", + project_id="project_id_value", + name="name_value", + type_=environment.JobType.JOB_TYPE_BATCH, + steps_location="steps_location_value", + current_state=jobs.JobState.JOB_STATE_STOPPED, + requested_state=jobs.JobState.JOB_STATE_STOPPED, + replace_job_id="replace_job_id_value", + client_request_id="client_request_id_value", + replaced_by_job_id="replaced_by_job_id_value", + temp_files=["temp_files_value"], + location="location_value", + created_from_snapshot_id="created_from_snapshot_id_value", + satisfies_pzs=True, ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = jobs.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.JobsV1Beta3GrpcTransport( + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_job(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, jobs.Job) + assert response.id == "id_value" + assert response.project_id == "project_id_value" + assert response.name == "name_value" + assert response.type_ == environment.JobType.JOB_TYPE_BATCH + assert response.steps_location == "steps_location_value" + assert response.current_state == jobs.JobState.JOB_STATE_STOPPED + assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED + assert response.replace_job_id == "replace_job_id_value" + assert response.client_request_id == "client_request_id_value" + assert response.replaced_by_job_id == "replaced_by_job_id_value" + assert response.temp_files == ["temp_files_value"] + assert response.location == "location_value" + assert response.created_from_snapshot_id == "created_from_snapshot_id_value" + assert response.satisfies_pzs is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_job_rest_interceptors(null_interceptor): + transport = transports.JobsV1Beta3RestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.JobsV1Beta3RestInterceptor(), ) client = JobsV1Beta3Client(transport=transport) - assert client.transport is transport + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.JobsV1Beta3RestInterceptor, "post_get_job" + ) as post, mock.patch.object( + transports.JobsV1Beta3RestInterceptor, "pre_get_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = jobs.GetJobRequest.pb(jobs.GetJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = jobs.Job.to_json(jobs.Job()) + + request = 
jobs.GetJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = jobs.Job() + + client.get_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + pre.assert_called_once() + post.assert_called_once() -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.JobsV1Beta3GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - transport = transports.JobsV1Beta3GrpcAsyncIOTransport( +def test_get_job_rest_bad_request( + transport: str = "rest", request_type=jobs.GetJobRequest +): + client = JobsV1Beta3Client( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - channel = transport.grpc_channel - assert channel + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2", "job_id": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_job(request) -@pytest.mark.parametrize( - "transport_class", - [ - transports.JobsV1Beta3GrpcTransport, - transports.JobsV1Beta3GrpcAsyncIOTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + +def test_get_job_rest_error(): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( - "transport_name", + "request_type", [ - "grpc", + jobs.UpdateJobRequest, + dict, ], ) -def test_transport_kind(transport_name): - transport = JobsV1Beta3Client.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. +def test_update_job_rest(request_type): client = JobsV1Beta3Client( credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.JobsV1Beta3GrpcTransport, + transport="rest", ) + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2", "job_id": "sample3"} + request_init["job"] = { + "id": "id_value", + "project_id": "project_id_value", + "name": "name_value", + "type_": 1, + "environment": { + "temp_storage_prefix": "temp_storage_prefix_value", + "cluster_manager_api_service": "cluster_manager_api_service_value", + "experiments": ["experiments_value1", "experiments_value2"], + "service_options": ["service_options_value1", "service_options_value2"], + "service_kms_key_name": "service_kms_key_name_value", + "worker_pools": [ + { + "kind": "kind_value", + "num_workers": 1212, + "packages": [{"name": "name_value", "location": "location_value"}], + "default_package_set": 1, + "machine_type": "machine_type_value", + "teardown_policy": 1, + "disk_size_gb": 1261, + "disk_type": "disk_type_value", + "disk_source_image": "disk_source_image_value", + "zone": "zone_value", + "taskrunner_settings": { + "task_user": "task_user_value", + "task_group": 
"task_group_value", + "oauth_scopes": ["oauth_scopes_value1", "oauth_scopes_value2"], + "base_url": "base_url_value", + "dataflow_api_version": "dataflow_api_version_value", + "parallel_worker_settings": { + "base_url": "base_url_value", + "reporting_enabled": True, + "service_path": "service_path_value", + "shuffle_service_path": "shuffle_service_path_value", + "worker_id": "worker_id_value", + "temp_storage_prefix": "temp_storage_prefix_value", + }, + "base_task_dir": "base_task_dir_value", + "continue_on_exception": True, + "log_to_serialconsole": True, + "alsologtostderr": True, + "log_upload_location": "log_upload_location_value", + "log_dir": "log_dir_value", + "temp_storage_prefix": "temp_storage_prefix_value", + "harness_command": "harness_command_value", + "workflow_file_name": "workflow_file_name_value", + "commandlines_file_name": "commandlines_file_name_value", + "vm_id": "vm_id_value", + "language_hint": "language_hint_value", + "streaming_worker_main_class": "streaming_worker_main_class_value", + }, + "on_host_maintenance": "on_host_maintenance_value", + "data_disks": [ + { + "size_gb": 739, + "disk_type": "disk_type_value", + "mount_point": "mount_point_value", + } + ], + "metadata": {}, + "autoscaling_settings": {"algorithm": 1, "max_num_workers": 1633}, + "pool_args": { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + }, + "network": "network_value", + "subnetwork": "subnetwork_value", + "worker_harness_container_image": "worker_harness_container_image_value", + "num_threads_per_worker": 2361, + "ip_configuration": 1, + "sdk_harness_container_images": [ + { + "container_image": "container_image_value", + "use_single_core_per_container": True, + "environment_id": "environment_id_value", + "capabilities": [ + "capabilities_value1", + "capabilities_value2", + ], + } + ], + } + ], + "user_agent": {"fields": {}}, + "version": {}, + "dataset": "dataset_value", + "sdk_pipeline_options": {}, + 
"internal_experiments": {}, + "service_account_email": "service_account_email_value", + "flex_resource_scheduling_goal": 1, + "worker_region": "worker_region_value", + "worker_zone": "worker_zone_value", + "shuffle_mode": 1, + "debug_options": {"enable_hot_key_logging": True}, + }, + "steps": [{"kind": "kind_value", "name": "name_value", "properties": {}}], + "steps_location": "steps_location_value", + "current_state": 1, + "current_state_time": {"seconds": 751, "nanos": 543}, + "requested_state": 1, + "execution_info": {"stages": {}}, + "create_time": {}, + "replace_job_id": "replace_job_id_value", + "transform_name_mapping": {}, + "client_request_id": "client_request_id_value", + "replaced_by_job_id": "replaced_by_job_id_value", + "temp_files": ["temp_files_value1", "temp_files_value2"], + "labels": {}, + "location": "location_value", + "pipeline_description": { + "original_pipeline_transform": [ + { + "kind": 1, + "id": "id_value", + "name": "name_value", + "display_data": [ + { + "key": "key_value", + "namespace": "namespace_value", + "str_value": "str_value_value", + "int64_value": 1073, + "float_value": 0.117, + "java_class_value": "java_class_value_value", + "timestamp_value": {}, + "duration_value": {"seconds": 751, "nanos": 543}, + "bool_value": True, + "short_str_value": "short_str_value_value", + "url": "url_value", + "label": "label_value", + } + ], + "output_collection_name": [ + "output_collection_name_value1", + "output_collection_name_value2", + ], + "input_collection_name": [ + "input_collection_name_value1", + "input_collection_name_value2", + ], + } + ], + "execution_pipeline_stage": [ + { + "name": "name_value", + "id": "id_value", + "kind": 1, + "input_source": [ + { + "user_name": "user_name_value", + "name": "name_value", + "original_transform_or_collection": "original_transform_or_collection_value", + "size_bytes": 1089, + } + ], + "output_source": {}, + "prerequisite_stage": [ + "prerequisite_stage_value1", + "prerequisite_stage_value2", + 
], + "component_transform": [ + { + "user_name": "user_name_value", + "name": "name_value", + "original_transform": "original_transform_value", + } + ], + "component_source": [ + { + "user_name": "user_name_value", + "name": "name_value", + "original_transform_or_collection": "original_transform_or_collection_value", + } + ], + } + ], + "display_data": {}, + }, + "stage_states": [ + { + "execution_stage_name": "execution_stage_name_value", + "execution_stage_state": 1, + "current_state_time": {}, + } + ], + "job_metadata": { + "sdk_version": { + "version": "version_value", + "version_display_name": "version_display_name_value", + "sdk_support_status": 1, + }, + "spanner_details": [ + { + "project_id": "project_id_value", + "instance_id": "instance_id_value", + "database_id": "database_id_value", + } + ], + "bigquery_details": [ + { + "table": "table_value", + "dataset": "dataset_value", + "project_id": "project_id_value", + "query": "query_value", + } + ], + "big_table_details": [ + { + "project_id": "project_id_value", + "instance_id": "instance_id_value", + "table_id": "table_id_value", + } + ], + "pubsub_details": [ + {"topic": "topic_value", "subscription": "subscription_value"} + ], + "file_details": [{"file_pattern": "file_pattern_value"}], + "datastore_details": [ + {"namespace": "namespace_value", "project_id": "project_id_value"} + ], + }, + "start_time": {}, + "created_from_snapshot_id": "created_from_snapshot_id_value", + "satisfies_pzs": True, + } + request = request_type(**request_init) -def test_jobs_v1_beta3_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.JobsV1Beta3Transport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = jobs.Job( + id="id_value", + project_id="project_id_value", + name="name_value", + type_=environment.JobType.JOB_TYPE_BATCH, + steps_location="steps_location_value", + current_state=jobs.JobState.JOB_STATE_STOPPED, + requested_state=jobs.JobState.JOB_STATE_STOPPED, + replace_job_id="replace_job_id_value", + client_request_id="client_request_id_value", + replaced_by_job_id="replaced_by_job_id_value", + temp_files=["temp_files_value"], + location="location_value", + created_from_snapshot_id="created_from_snapshot_id_value", + satisfies_pzs=True, ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = jobs.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) -def test_jobs_v1_beta3_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.transports.JobsV1Beta3Transport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.JobsV1Beta3Transport( - credentials=ga_credentials.AnonymousCredentials(), - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_job(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, jobs.Job) + assert response.id == "id_value" + assert response.project_id == "project_id_value" + assert response.name == "name_value" + assert response.type_ == environment.JobType.JOB_TYPE_BATCH + assert response.steps_location == "steps_location_value" + assert response.current_state == jobs.JobState.JOB_STATE_STOPPED + assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED + assert response.replace_job_id == "replace_job_id_value" + assert response.client_request_id == "client_request_id_value" + assert response.replaced_by_job_id == "replaced_by_job_id_value" + assert response.temp_files == ["temp_files_value"] + assert response.location == "location_value" + assert response.created_from_snapshot_id == "created_from_snapshot_id_value" + assert response.satisfies_pzs is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_job_rest_interceptors(null_interceptor): + transport = transports.JobsV1Beta3RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.JobsV1Beta3RestInterceptor(), + ) + client = JobsV1Beta3Client(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.JobsV1Beta3RestInterceptor, "post_update_job" + ) as post, mock.patch.object( + transports.JobsV1Beta3RestInterceptor, "pre_update_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = jobs.UpdateJobRequest.pb(jobs.UpdateJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = jobs.Job.to_json(jobs.Job()) + + request = jobs.UpdateJobRequest() + metadata 
= [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = jobs.Job() + + client.update_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_job_rest_bad_request( + transport: str = "rest", request_type=jobs.UpdateJobRequest +): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2", "job_id": "sample3"} + request_init["job"] = { + "id": "id_value", + "project_id": "project_id_value", + "name": "name_value", + "type_": 1, + "environment": { + "temp_storage_prefix": "temp_storage_prefix_value", + "cluster_manager_api_service": "cluster_manager_api_service_value", + "experiments": ["experiments_value1", "experiments_value2"], + "service_options": ["service_options_value1", "service_options_value2"], + "service_kms_key_name": "service_kms_key_name_value", + "worker_pools": [ + { + "kind": "kind_value", + "num_workers": 1212, + "packages": [{"name": "name_value", "location": "location_value"}], + "default_package_set": 1, + "machine_type": "machine_type_value", + "teardown_policy": 1, + "disk_size_gb": 1261, + "disk_type": "disk_type_value", + "disk_source_image": "disk_source_image_value", + "zone": "zone_value", + "taskrunner_settings": { + "task_user": "task_user_value", + "task_group": "task_group_value", + "oauth_scopes": ["oauth_scopes_value1", "oauth_scopes_value2"], + "base_url": "base_url_value", + "dataflow_api_version": "dataflow_api_version_value", + "parallel_worker_settings": { + "base_url": "base_url_value", + "reporting_enabled": True, + "service_path": "service_path_value", + "shuffle_service_path": "shuffle_service_path_value", + "worker_id": "worker_id_value", + "temp_storage_prefix": "temp_storage_prefix_value", + }, + 
"base_task_dir": "base_task_dir_value", + "continue_on_exception": True, + "log_to_serialconsole": True, + "alsologtostderr": True, + "log_upload_location": "log_upload_location_value", + "log_dir": "log_dir_value", + "temp_storage_prefix": "temp_storage_prefix_value", + "harness_command": "harness_command_value", + "workflow_file_name": "workflow_file_name_value", + "commandlines_file_name": "commandlines_file_name_value", + "vm_id": "vm_id_value", + "language_hint": "language_hint_value", + "streaming_worker_main_class": "streaming_worker_main_class_value", + }, + "on_host_maintenance": "on_host_maintenance_value", + "data_disks": [ + { + "size_gb": 739, + "disk_type": "disk_type_value", + "mount_point": "mount_point_value", + } + ], + "metadata": {}, + "autoscaling_settings": {"algorithm": 1, "max_num_workers": 1633}, + "pool_args": { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + }, + "network": "network_value", + "subnetwork": "subnetwork_value", + "worker_harness_container_image": "worker_harness_container_image_value", + "num_threads_per_worker": 2361, + "ip_configuration": 1, + "sdk_harness_container_images": [ + { + "container_image": "container_image_value", + "use_single_core_per_container": True, + "environment_id": "environment_id_value", + "capabilities": [ + "capabilities_value1", + "capabilities_value2", + ], + } + ], + } + ], + "user_agent": {"fields": {}}, + "version": {}, + "dataset": "dataset_value", + "sdk_pipeline_options": {}, + "internal_experiments": {}, + "service_account_email": "service_account_email_value", + "flex_resource_scheduling_goal": 1, + "worker_region": "worker_region_value", + "worker_zone": "worker_zone_value", + "shuffle_mode": 1, + "debug_options": {"enable_hot_key_logging": True}, + }, + "steps": [{"kind": "kind_value", "name": "name_value", "properties": {}}], + "steps_location": "steps_location_value", + "current_state": 1, + "current_state_time": {"seconds": 751, 
"nanos": 543}, + "requested_state": 1, + "execution_info": {"stages": {}}, + "create_time": {}, + "replace_job_id": "replace_job_id_value", + "transform_name_mapping": {}, + "client_request_id": "client_request_id_value", + "replaced_by_job_id": "replaced_by_job_id_value", + "temp_files": ["temp_files_value1", "temp_files_value2"], + "labels": {}, + "location": "location_value", + "pipeline_description": { + "original_pipeline_transform": [ + { + "kind": 1, + "id": "id_value", + "name": "name_value", + "display_data": [ + { + "key": "key_value", + "namespace": "namespace_value", + "str_value": "str_value_value", + "int64_value": 1073, + "float_value": 0.117, + "java_class_value": "java_class_value_value", + "timestamp_value": {}, + "duration_value": {"seconds": 751, "nanos": 543}, + "bool_value": True, + "short_str_value": "short_str_value_value", + "url": "url_value", + "label": "label_value", + } + ], + "output_collection_name": [ + "output_collection_name_value1", + "output_collection_name_value2", + ], + "input_collection_name": [ + "input_collection_name_value1", + "input_collection_name_value2", + ], + } + ], + "execution_pipeline_stage": [ + { + "name": "name_value", + "id": "id_value", + "kind": 1, + "input_source": [ + { + "user_name": "user_name_value", + "name": "name_value", + "original_transform_or_collection": "original_transform_or_collection_value", + "size_bytes": 1089, + } + ], + "output_source": {}, + "prerequisite_stage": [ + "prerequisite_stage_value1", + "prerequisite_stage_value2", + ], + "component_transform": [ + { + "user_name": "user_name_value", + "name": "name_value", + "original_transform": "original_transform_value", + } + ], + "component_source": [ + { + "user_name": "user_name_value", + "name": "name_value", + "original_transform_or_collection": "original_transform_or_collection_value", + } + ], + } + ], + "display_data": {}, + }, + "stage_states": [ + { + "execution_stage_name": "execution_stage_name_value", + 
"execution_stage_state": 1, + "current_state_time": {}, + } + ], + "job_metadata": { + "sdk_version": { + "version": "version_value", + "version_display_name": "version_display_name_value", + "sdk_support_status": 1, + }, + "spanner_details": [ + { + "project_id": "project_id_value", + "instance_id": "instance_id_value", + "database_id": "database_id_value", + } + ], + "bigquery_details": [ + { + "table": "table_value", + "dataset": "dataset_value", + "project_id": "project_id_value", + "query": "query_value", + } + ], + "big_table_details": [ + { + "project_id": "project_id_value", + "instance_id": "instance_id_value", + "table_id": "table_id_value", + } + ], + "pubsub_details": [ + {"topic": "topic_value", "subscription": "subscription_value"} + ], + "file_details": [{"file_pattern": "file_pattern_value"}], + "datastore_details": [ + {"namespace": "namespace_value", "project_id": "project_id_value"} + ], + }, + "start_time": {}, + "created_from_snapshot_id": "created_from_snapshot_id_value", + "satisfies_pzs": True, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_job(request) + + +def test_update_job_rest_error(): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + jobs.ListJobsRequest, + dict, + ], +) +def test_list_jobs_rest(request_type): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = jobs.ListJobsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = jobs.ListJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_jobs(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListJobsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_jobs_rest_interceptors(null_interceptor): + transport = transports.JobsV1Beta3RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.JobsV1Beta3RestInterceptor(), + ) + client = JobsV1Beta3Client(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.JobsV1Beta3RestInterceptor, "post_list_jobs" + ) as post, mock.patch.object( + transports.JobsV1Beta3RestInterceptor, "pre_list_jobs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = jobs.ListJobsRequest.pb(jobs.ListJobsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = jobs.ListJobsResponse.to_json( + jobs.ListJobsResponse() + ) + + request = jobs.ListJobsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = jobs.ListJobsResponse() + + client.list_jobs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_jobs_rest_bad_request( + transport: str = "rest", request_type=jobs.ListJobsRequest +): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2"} + request = request_type(**request_init) + + # Mock the http 
request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_jobs(request) + + +def test_list_jobs_rest_pager(transport: str = "rest"): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + jobs.Job(), + ], + next_page_token="abc", + ), + jobs.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + ], + next_page_token="ghi", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(jobs.ListJobsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project_id": "sample1", "location": "sample2"} + + pager = client.list_jobs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, jobs.Job) for i in results) + + pages = list(client.list_jobs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert 
page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + jobs.ListJobsRequest, + dict, + ], +) +def test_aggregated_list_jobs_rest(request_type): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = jobs.ListJobsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = jobs.ListJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.aggregated_list_jobs(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.AggregatedListJobsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_jobs_rest_interceptors(null_interceptor): + transport = transports.JobsV1Beta3RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.JobsV1Beta3RestInterceptor(), + ) + client = JobsV1Beta3Client(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.JobsV1Beta3RestInterceptor, "post_aggregated_list_jobs" + ) as post, mock.patch.object( + transports.JobsV1Beta3RestInterceptor, "pre_aggregated_list_jobs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = jobs.ListJobsRequest.pb(jobs.ListJobsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = jobs.ListJobsResponse.to_json( + jobs.ListJobsResponse() + ) + + request = jobs.ListJobsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = jobs.ListJobsResponse() + + client.aggregated_list_jobs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_jobs_rest_bad_request( + transport: str = "rest", request_type=jobs.ListJobsRequest +): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list_jobs(request) + + +def test_aggregated_list_jobs_rest_pager(transport: str = "rest"): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + jobs.Job(), + ], + next_page_token="abc", + ), + jobs.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + ], + next_page_token="ghi", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(jobs.ListJobsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project_id": "sample1"} + + pager = client.aggregated_list_jobs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, jobs.Job) for i in results) + + pages = list(client.aggregated_list_jobs(request=sample_request).pages) + for 
page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_check_active_jobs_rest_no_http_options(): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = jobs.CheckActiveJobsRequest() + with pytest.raises(RuntimeError): + client.check_active_jobs(request) + + +@pytest.mark.parametrize( + "request_type", + [ + jobs.SnapshotJobRequest, + dict, + ], +) +def test_snapshot_job_rest(request_type): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2", "job_id": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = snapshots.Snapshot( + id="id_value", + project_id="project_id_value", + source_job_id="source_job_id_value", + state=snapshots.SnapshotState.PENDING, + description="description_value", + disk_size_bytes=1611, + region="region_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = snapshots.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.snapshot_job(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, snapshots.Snapshot) + assert response.id == "id_value" + assert response.project_id == "project_id_value" + assert response.source_job_id == "source_job_id_value" + assert response.state == snapshots.SnapshotState.PENDING + assert response.description == "description_value" + assert response.disk_size_bytes == 1611 + assert response.region == "region_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_snapshot_job_rest_interceptors(null_interceptor): + transport = transports.JobsV1Beta3RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.JobsV1Beta3RestInterceptor(), + ) + client = JobsV1Beta3Client(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.JobsV1Beta3RestInterceptor, "post_snapshot_job" + ) as post, mock.patch.object( + transports.JobsV1Beta3RestInterceptor, "pre_snapshot_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = jobs.SnapshotJobRequest.pb(jobs.SnapshotJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = snapshots.Snapshot.to_json(snapshots.Snapshot()) + + request = jobs.SnapshotJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = snapshots.Snapshot() + + client.snapshot_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_snapshot_job_rest_bad_request( + transport: str = "rest", request_type=jobs.SnapshotJobRequest +): + 
client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2", "job_id": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.snapshot_job(request) + + +def test_snapshot_job_rest_error(): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_check_active_jobs_rest_error(): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # Since a `google.api.http` annotation is required for using a rest transport + # method, this should error. + with pytest.raises(RuntimeError) as runtime_error: + client.check_active_jobs({}) + assert ( + "Cannot define a method without a valid 'google.api.http' annotation." + in str(runtime_error.value) + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.JobsV1Beta3GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.JobsV1Beta3GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = JobsV1Beta3Client( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.JobsV1Beta3GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = JobsV1Beta3Client( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = JobsV1Beta3Client( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.JobsV1Beta3GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = JobsV1Beta3Client( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.JobsV1Beta3GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = JobsV1Beta3Client(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.JobsV1Beta3GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.JobsV1Beta3GrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.JobsV1Beta3GrpcTransport, + transports.JobsV1Beta3GrpcAsyncIOTransport, + transports.JobsV1Beta3RestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = JobsV1Beta3Client.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.JobsV1Beta3GrpcTransport, + ) + + +def test_jobs_v1_beta3_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.JobsV1Beta3Transport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_jobs_v1_beta3_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.transports.JobsV1Beta3Transport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.JobsV1Beta3Transport( + credentials=ga_credentials.AnonymousCredentials(), + ) # Every method on the transport should just blindly # raise NotImplementedError. @@ -2492,6 +4373,7 @@ def test_jobs_v1_beta3_transport_auth_adc(transport_class): [ transports.JobsV1Beta3GrpcTransport, transports.JobsV1Beta3GrpcAsyncIOTransport, + transports.JobsV1Beta3RestTransport, ], ) def test_jobs_v1_beta3_transport_auth_gdch_credentials(transport_class): @@ -2591,11 +4473,23 @@ def test_jobs_v1_beta3_grpc_transport_client_cert_source_for_mtls(transport_clas ) +def test_jobs_v1_beta3_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.JobsV1Beta3RestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_jobs_v1_beta3_host_no_port(transport_name): @@ -2606,7 +4500,11 @@ def test_jobs_v1_beta3_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) @pytest.mark.parametrize( @@ -2614,6 +4512,7 @@ def test_jobs_v1_beta3_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_jobs_v1_beta3_host_with_port(transport_name): @@ -2624,7 +4523,51 @@ def test_jobs_v1_beta3_host_with_port(transport_name): ), transport=transport_name, ) - assert 
client.transport._host == ("dataflow.googleapis.com:8000") + assert client.transport._host == ( + "dataflow.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_jobs_v1_beta3_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = JobsV1Beta3Client( + credentials=creds1, + transport=transport_name, + ) + client2 = JobsV1Beta3Client( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_job._session + session2 = client2.transport.create_job._session + assert session1 != session2 + session1 = client1.transport.get_job._session + session2 = client2.transport.get_job._session + assert session1 != session2 + session1 = client1.transport.update_job._session + session2 = client2.transport.update_job._session + assert session1 != session2 + session1 = client1.transport.list_jobs._session + session2 = client2.transport.list_jobs._session + assert session1 != session2 + session1 = client1.transport.aggregated_list_jobs._session + session2 = client2.transport.aggregated_list_jobs._session + assert session1 != session2 + session1 = client1.transport.check_active_jobs._session + session2 = client2.transport.check_active_jobs._session + assert session1 != session2 + session1 = client1.transport.snapshot_job._session + session2 = client2.transport.snapshot_job._session + assert session1 != session2 def test_jobs_v1_beta3_grpc_transport_channel(): @@ -2887,6 +4830,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -2904,6 +4848,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: diff --git 
a/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py b/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py index 8f020b3..94082a0 100644 --- a/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py +++ b/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py @@ -18,10 +18,12 @@ # try/except added for compatibility with python < 3.8 try: from unittest import mock - from unittest.mock import AsyncMock -except ImportError: + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template @@ -31,11 +33,15 @@ from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account +from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore import grpc from grpc.experimental import aio +from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.cloud.dataflow_v1beta3.services.messages_v1_beta3 import ( MessagesV1Beta3AsyncClient, @@ -95,6 +101,7 @@ def test__get_default_mtls_endpoint(): [ (MessagesV1Beta3Client, "grpc"), (MessagesV1Beta3AsyncClient, "grpc_asyncio"), + (MessagesV1Beta3Client, "rest"), ], ) def test_messages_v1_beta3_client_from_service_account_info( @@ -110,7 +117,11 @@ def test_messages_v1_beta3_client_from_service_account_info( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) 
@pytest.mark.parametrize( @@ -118,6 +129,7 @@ def test_messages_v1_beta3_client_from_service_account_info( [ (transports.MessagesV1Beta3GrpcTransport, "grpc"), (transports.MessagesV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"), + (transports.MessagesV1Beta3RestTransport, "rest"), ], ) def test_messages_v1_beta3_client_service_account_always_use_jwt( @@ -143,6 +155,7 @@ def test_messages_v1_beta3_client_service_account_always_use_jwt( [ (MessagesV1Beta3Client, "grpc"), (MessagesV1Beta3AsyncClient, "grpc_asyncio"), + (MessagesV1Beta3Client, "rest"), ], ) def test_messages_v1_beta3_client_from_service_account_file( @@ -165,13 +178,18 @@ def test_messages_v1_beta3_client_from_service_account_file( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) def test_messages_v1_beta3_client_get_transport_class(): transport = MessagesV1Beta3Client.get_transport_class() available_transports = [ transports.MessagesV1Beta3GrpcTransport, + transports.MessagesV1Beta3RestTransport, ] assert transport in available_transports @@ -188,6 +206,7 @@ def test_messages_v1_beta3_client_get_transport_class(): transports.MessagesV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", ), + (MessagesV1Beta3Client, transports.MessagesV1Beta3RestTransport, "rest"), ], ) @mock.patch.object( @@ -343,6 +362,18 @@ def test_messages_v1_beta3_client_client_options( "grpc_asyncio", "false", ), + ( + MessagesV1Beta3Client, + transports.MessagesV1Beta3RestTransport, + "rest", + "true", + ), + ( + MessagesV1Beta3Client, + transports.MessagesV1Beta3RestTransport, + "rest", + "false", + ), ], ) @mock.patch.object( @@ -542,6 +573,7 @@ def test_messages_v1_beta3_client_get_mtls_endpoint_and_cert_source(client_class transports.MessagesV1Beta3GrpcAsyncIOTransport, 
"grpc_asyncio", ), + (MessagesV1Beta3Client, transports.MessagesV1Beta3RestTransport, "rest"), ], ) def test_messages_v1_beta3_client_client_options_scopes( @@ -582,6 +614,7 @@ def test_messages_v1_beta3_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + (MessagesV1Beta3Client, transports.MessagesV1Beta3RestTransport, "rest", None), ], ) def test_messages_v1_beta3_client_client_options_credentials_file( @@ -1065,6 +1098,191 @@ async def test_list_job_messages_async_pages(): assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize( + "request_type", + [ + messages.ListJobMessagesRequest, + dict, + ], +) +def test_list_job_messages_rest(request_type): + client = MessagesV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2", "job_id": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = messages.ListJobMessagesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = messages.ListJobMessagesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_job_messages(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListJobMessagesPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_job_messages_rest_interceptors(null_interceptor): + transport = transports.MessagesV1Beta3RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MessagesV1Beta3RestInterceptor(), + ) + client = MessagesV1Beta3Client(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MessagesV1Beta3RestInterceptor, "post_list_job_messages" + ) as post, mock.patch.object( + transports.MessagesV1Beta3RestInterceptor, "pre_list_job_messages" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = messages.ListJobMessagesRequest.pb( + messages.ListJobMessagesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = messages.ListJobMessagesResponse.to_json( + messages.ListJobMessagesResponse() + ) + + request = messages.ListJobMessagesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = messages.ListJobMessagesResponse() + + client.list_job_messages( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_job_messages_rest_bad_request( + transport: str = "rest", request_type=messages.ListJobMessagesRequest +): + client = MessagesV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that 
will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2", "job_id": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_job_messages(request) + + +def test_list_job_messages_rest_pager(transport: str = "rest"): + client = MessagesV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + messages.ListJobMessagesResponse( + job_messages=[ + messages.JobMessage(), + messages.JobMessage(), + messages.JobMessage(), + ], + next_page_token="abc", + ), + messages.ListJobMessagesResponse( + job_messages=[], + next_page_token="def", + ), + messages.ListJobMessagesResponse( + job_messages=[ + messages.JobMessage(), + ], + next_page_token="ghi", + ), + messages.ListJobMessagesResponse( + job_messages=[ + messages.JobMessage(), + messages.JobMessage(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(messages.ListJobMessagesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + 
"project_id": "sample1", + "location": "sample2", + "job_id": "sample3", + } + + pager = client.list_job_messages(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, messages.JobMessage) for i in results) + + pages = list(client.list_job_messages(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.MessagesV1Beta3GrpcTransport( @@ -1146,6 +1364,7 @@ def test_transport_get_channel(): [ transports.MessagesV1Beta3GrpcTransport, transports.MessagesV1Beta3GrpcAsyncIOTransport, + transports.MessagesV1Beta3RestTransport, ], ) def test_transport_adc(transport_class): @@ -1160,6 +1379,7 @@ def test_transport_adc(transport_class): "transport_name", [ "grpc", + "rest", ], ) def test_transport_kind(transport_name): @@ -1302,6 +1522,7 @@ def test_messages_v1_beta3_transport_auth_adc(transport_class): [ transports.MessagesV1Beta3GrpcTransport, transports.MessagesV1Beta3GrpcAsyncIOTransport, + transports.MessagesV1Beta3RestTransport, ], ) def test_messages_v1_beta3_transport_auth_gdch_credentials(transport_class): @@ -1404,11 +1625,23 @@ def test_messages_v1_beta3_grpc_transport_client_cert_source_for_mtls(transport_ ) +def test_messages_v1_beta3_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.MessagesV1Beta3RestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_messages_v1_beta3_host_no_port(transport_name): 
@@ -1419,7 +1652,11 @@ def test_messages_v1_beta3_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) @pytest.mark.parametrize( @@ -1427,6 +1664,7 @@ def test_messages_v1_beta3_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_messages_v1_beta3_host_with_port(transport_name): @@ -1437,7 +1675,33 @@ def test_messages_v1_beta3_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("dataflow.googleapis.com:8000") + assert client.transport._host == ( + "dataflow.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_messages_v1_beta3_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = MessagesV1Beta3Client( + credentials=creds1, + transport=transport_name, + ) + client2 = MessagesV1Beta3Client( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_job_messages._session + session2 = client2.transport.list_job_messages._session + assert session1 != session2 def test_messages_v1_beta3_grpc_transport_channel(): @@ -1708,6 +1972,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -1725,6 +1990,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: diff --git a/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py b/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py index 99f17d7..11345e5 100644 --- 
a/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py +++ b/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py @@ -18,10 +18,12 @@ # try/except added for compatibility with python < 3.8 try: from unittest import mock - from unittest.mock import AsyncMock -except ImportError: + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template @@ -31,11 +33,15 @@ from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account +from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore import grpc from grpc.experimental import aio +from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3 import ( MetricsV1Beta3AsyncClient, @@ -95,6 +101,7 @@ def test__get_default_mtls_endpoint(): [ (MetricsV1Beta3Client, "grpc"), (MetricsV1Beta3AsyncClient, "grpc_asyncio"), + (MetricsV1Beta3Client, "rest"), ], ) def test_metrics_v1_beta3_client_from_service_account_info( @@ -110,7 +117,11 @@ def test_metrics_v1_beta3_client_from_service_account_info( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) @pytest.mark.parametrize( @@ -118,6 +129,7 @@ def test_metrics_v1_beta3_client_from_service_account_info( [ (transports.MetricsV1Beta3GrpcTransport, "grpc"), 
(transports.MetricsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"), + (transports.MetricsV1Beta3RestTransport, "rest"), ], ) def test_metrics_v1_beta3_client_service_account_always_use_jwt( @@ -143,6 +155,7 @@ def test_metrics_v1_beta3_client_service_account_always_use_jwt( [ (MetricsV1Beta3Client, "grpc"), (MetricsV1Beta3AsyncClient, "grpc_asyncio"), + (MetricsV1Beta3Client, "rest"), ], ) def test_metrics_v1_beta3_client_from_service_account_file( @@ -165,13 +178,18 @@ def test_metrics_v1_beta3_client_from_service_account_file( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) def test_metrics_v1_beta3_client_get_transport_class(): transport = MetricsV1Beta3Client.get_transport_class() available_transports = [ transports.MetricsV1Beta3GrpcTransport, + transports.MetricsV1Beta3RestTransport, ] assert transport in available_transports @@ -188,6 +206,7 @@ def test_metrics_v1_beta3_client_get_transport_class(): transports.MetricsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", ), + (MetricsV1Beta3Client, transports.MetricsV1Beta3RestTransport, "rest"), ], ) @mock.patch.object( @@ -333,6 +352,8 @@ def test_metrics_v1_beta3_client_client_options( "grpc_asyncio", "false", ), + (MetricsV1Beta3Client, transports.MetricsV1Beta3RestTransport, "rest", "true"), + (MetricsV1Beta3Client, transports.MetricsV1Beta3RestTransport, "rest", "false"), ], ) @mock.patch.object( @@ -532,6 +553,7 @@ def test_metrics_v1_beta3_client_get_mtls_endpoint_and_cert_source(client_class) transports.MetricsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", ), + (MetricsV1Beta3Client, transports.MetricsV1Beta3RestTransport, "rest"), ], ) def test_metrics_v1_beta3_client_client_options_scopes( @@ -572,6 +594,7 @@ def 
test_metrics_v1_beta3_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + (MetricsV1Beta3Client, transports.MetricsV1Beta3RestTransport, "rest", None), ], ) def test_metrics_v1_beta3_client_client_options_credentials_file( @@ -1569,6 +1592,506 @@ async def test_get_stage_execution_details_async_pages(): assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize( + "request_type", + [ + metrics.GetJobMetricsRequest, + dict, + ], +) +def test_get_job_metrics_rest(request_type): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2", "job_id": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metrics.JobMetrics() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metrics.JobMetrics.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_job_metrics(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, metrics.JobMetrics) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_job_metrics_rest_interceptors(null_interceptor): + transport = transports.MetricsV1Beta3RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetricsV1Beta3RestInterceptor(), + ) + client = MetricsV1Beta3Client(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetricsV1Beta3RestInterceptor, "post_get_job_metrics" + ) as post, mock.patch.object( + transports.MetricsV1Beta3RestInterceptor, "pre_get_job_metrics" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metrics.GetJobMetricsRequest.pb(metrics.GetJobMetricsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = metrics.JobMetrics.to_json(metrics.JobMetrics()) + + request = metrics.GetJobMetricsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metrics.JobMetrics() + + client.get_job_metrics( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_job_metrics_rest_bad_request( + transport: str = "rest", request_type=metrics.GetJobMetricsRequest +): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2", "job_id": "sample3"} + request = request_type(**request_init) 
+ + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_job_metrics(request) + + +def test_get_job_metrics_rest_error(): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metrics.GetJobExecutionDetailsRequest, + dict, + ], +) +def test_get_job_execution_details_rest(request_type): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2", "job_id": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metrics.JobExecutionDetails( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metrics.JobExecutionDetails.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_job_execution_details(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.GetJobExecutionDetailsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_job_execution_details_rest_interceptors(null_interceptor): + transport = transports.MetricsV1Beta3RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetricsV1Beta3RestInterceptor(), + ) + client = MetricsV1Beta3Client(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetricsV1Beta3RestInterceptor, "post_get_job_execution_details" + ) as post, mock.patch.object( + transports.MetricsV1Beta3RestInterceptor, "pre_get_job_execution_details" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metrics.GetJobExecutionDetailsRequest.pb( + metrics.GetJobExecutionDetailsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = metrics.JobExecutionDetails.to_json( + metrics.JobExecutionDetails() + ) + + request = metrics.GetJobExecutionDetailsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metrics.JobExecutionDetails() + + client.get_job_execution_details( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_job_execution_details_rest_bad_request( + transport: str = "rest", request_type=metrics.GetJobExecutionDetailsRequest +): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2", "job_id": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_job_execution_details(request) + + +def test_get_job_execution_details_rest_pager(transport: str = "rest"): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + metrics.JobExecutionDetails( + stages=[ + metrics.StageSummary(), + metrics.StageSummary(), + metrics.StageSummary(), + ], + next_page_token="abc", + ), + metrics.JobExecutionDetails( + stages=[], + next_page_token="def", + ), + metrics.JobExecutionDetails( + stages=[ + metrics.StageSummary(), + ], + next_page_token="ghi", + ), + metrics.JobExecutionDetails( + stages=[ + metrics.StageSummary(), + metrics.StageSummary(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(metrics.JobExecutionDetails.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "project_id": "sample1", + "location": "sample2", + "job_id": "sample3", + } + + pager = client.get_job_execution_details(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, metrics.StageSummary) for i in results) + + pages = list(client.get_job_execution_details(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + metrics.GetStageExecutionDetailsRequest, + dict, + ], +) +def test_get_stage_execution_details_rest(request_type): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project_id": "sample1", + "location": "sample2", + "job_id": "sample3", + "stage_id": "sample4", + } + request = request_type(**request_init) + + # Mock the http request 
call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metrics.StageExecutionDetails( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metrics.StageExecutionDetails.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_stage_execution_details(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.GetStageExecutionDetailsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_stage_execution_details_rest_interceptors(null_interceptor): + transport = transports.MetricsV1Beta3RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetricsV1Beta3RestInterceptor(), + ) + client = MetricsV1Beta3Client(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetricsV1Beta3RestInterceptor, "post_get_stage_execution_details" + ) as post, mock.patch.object( + transports.MetricsV1Beta3RestInterceptor, "pre_get_stage_execution_details" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metrics.GetStageExecutionDetailsRequest.pb( + metrics.GetStageExecutionDetailsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = metrics.StageExecutionDetails.to_json( + metrics.StageExecutionDetails() + ) + + request = metrics.GetStageExecutionDetailsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metrics.StageExecutionDetails() + + client.get_stage_execution_details( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_stage_execution_details_rest_bad_request( + transport: str = "rest", request_type=metrics.GetStageExecutionDetailsRequest +): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project_id": "sample1", + "location": "sample2", + "job_id": "sample3", + "stage_id": "sample4", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_stage_execution_details(request) + + +def test_get_stage_execution_details_rest_pager(transport: str = "rest"): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + metrics.StageExecutionDetails( + workers=[ + metrics.WorkerDetails(), + metrics.WorkerDetails(), + metrics.WorkerDetails(), + ], + next_page_token="abc", + ), + metrics.StageExecutionDetails( + workers=[], + next_page_token="def", + ), + metrics.StageExecutionDetails( + workers=[ + metrics.WorkerDetails(), + ], + next_page_token="ghi", + ), + metrics.StageExecutionDetails( + workers=[ + metrics.WorkerDetails(), + metrics.WorkerDetails(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(metrics.StageExecutionDetails.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "project_id": "sample1", + "location": "sample2", + "job_id": "sample3", + "stage_id": "sample4", + } + + pager = client.get_stage_execution_details(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, metrics.WorkerDetails) for i in results) + + pages = list(client.get_stage_execution_details(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.MetricsV1Beta3GrpcTransport( @@ -1650,6 +2173,7 @@ def test_transport_get_channel(): [ transports.MetricsV1Beta3GrpcTransport, transports.MetricsV1Beta3GrpcAsyncIOTransport, + transports.MetricsV1Beta3RestTransport, ], ) def test_transport_adc(transport_class): @@ -1664,6 +2188,7 @@ def test_transport_adc(transport_class): "transport_name", [ "grpc", + "rest", ], ) def test_transport_kind(transport_name): @@ -1810,6 +2335,7 @@ def test_metrics_v1_beta3_transport_auth_adc(transport_class): [ transports.MetricsV1Beta3GrpcTransport, transports.MetricsV1Beta3GrpcAsyncIOTransport, + transports.MetricsV1Beta3RestTransport, ], ) def test_metrics_v1_beta3_transport_auth_gdch_credentials(transport_class): @@ -1912,11 +2438,23 @@ def test_metrics_v1_beta3_grpc_transport_client_cert_source_for_mtls(transport_c ) +def test_metrics_v1_beta3_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.MetricsV1Beta3RestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_metrics_v1_beta3_host_no_port(transport_name): @@ -1927,7 +2465,11 @@ def test_metrics_v1_beta3_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) @pytest.mark.parametrize( @@ -1935,6 +2477,7 @@ def test_metrics_v1_beta3_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_metrics_v1_beta3_host_with_port(transport_name): @@ -1945,7 +2488,39 
@@ def test_metrics_v1_beta3_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("dataflow.googleapis.com:8000") + assert client.transport._host == ( + "dataflow.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_metrics_v1_beta3_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = MetricsV1Beta3Client( + credentials=creds1, + transport=transport_name, + ) + client2 = MetricsV1Beta3Client( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_job_metrics._session + session2 = client2.transport.get_job_metrics._session + assert session1 != session2 + session1 = client1.transport.get_job_execution_details._session + session2 = client2.transport.get_job_execution_details._session + assert session1 != session2 + session1 = client1.transport.get_stage_execution_details._session + session2 = client2.transport.get_stage_execution_details._session + assert session1 != session2 def test_metrics_v1_beta3_grpc_transport_channel(): @@ -2216,6 +2791,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -2233,6 +2809,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: diff --git a/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py b/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py index 1289ad8..ab23669 100644 --- a/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py +++ b/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py @@ -18,10 +18,12 @@ # try/except added for compatibility with python < 3.8 try: from unittest import mock - from unittest.mock 
import AsyncMock -except ImportError: + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template @@ -32,11 +34,15 @@ from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore import grpc from grpc.experimental import aio +from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3 import ( SnapshotsV1Beta3AsyncClient, @@ -96,6 +102,7 @@ def test__get_default_mtls_endpoint(): [ (SnapshotsV1Beta3Client, "grpc"), (SnapshotsV1Beta3AsyncClient, "grpc_asyncio"), + (SnapshotsV1Beta3Client, "rest"), ], ) def test_snapshots_v1_beta3_client_from_service_account_info( @@ -111,7 +118,11 @@ def test_snapshots_v1_beta3_client_from_service_account_info( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) @pytest.mark.parametrize( @@ -119,6 +130,7 @@ def test_snapshots_v1_beta3_client_from_service_account_info( [ (transports.SnapshotsV1Beta3GrpcTransport, "grpc"), (transports.SnapshotsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"), + (transports.SnapshotsV1Beta3RestTransport, "rest"), ], ) def test_snapshots_v1_beta3_client_service_account_always_use_jwt( @@ -144,6 +156,7 @@ def 
test_snapshots_v1_beta3_client_service_account_always_use_jwt( [ (SnapshotsV1Beta3Client, "grpc"), (SnapshotsV1Beta3AsyncClient, "grpc_asyncio"), + (SnapshotsV1Beta3Client, "rest"), ], ) def test_snapshots_v1_beta3_client_from_service_account_file( @@ -166,13 +179,18 @@ def test_snapshots_v1_beta3_client_from_service_account_file( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) def test_snapshots_v1_beta3_client_get_transport_class(): transport = SnapshotsV1Beta3Client.get_transport_class() available_transports = [ transports.SnapshotsV1Beta3GrpcTransport, + transports.SnapshotsV1Beta3RestTransport, ] assert transport in available_transports @@ -189,6 +207,7 @@ def test_snapshots_v1_beta3_client_get_transport_class(): transports.SnapshotsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", ), + (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3RestTransport, "rest"), ], ) @mock.patch.object( @@ -344,6 +363,18 @@ def test_snapshots_v1_beta3_client_client_options( "grpc_asyncio", "false", ), + ( + SnapshotsV1Beta3Client, + transports.SnapshotsV1Beta3RestTransport, + "rest", + "true", + ), + ( + SnapshotsV1Beta3Client, + transports.SnapshotsV1Beta3RestTransport, + "rest", + "false", + ), ], ) @mock.patch.object( @@ -543,6 +574,7 @@ def test_snapshots_v1_beta3_client_get_mtls_endpoint_and_cert_source(client_clas transports.SnapshotsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", ), + (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3RestTransport, "rest"), ], ) def test_snapshots_v1_beta3_client_client_options_scopes( @@ -583,6 +615,12 @@ def test_snapshots_v1_beta3_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + ( + SnapshotsV1Beta3Client, + transports.SnapshotsV1Beta3RestTransport, + 
"rest", + None, + ), ], ) def test_snapshots_v1_beta3_client_client_options_credentials_file( @@ -1172,6 +1210,400 @@ async def test_list_snapshots_field_headers_async(): ) in kw["metadata"] +@pytest.mark.parametrize( + "request_type", + [ + snapshots.GetSnapshotRequest, + dict, + ], +) +def test_get_snapshot_rest(request_type): + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project_id": "sample1", + "location": "sample2", + "snapshot_id": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = snapshots.Snapshot( + id="id_value", + project_id="project_id_value", + source_job_id="source_job_id_value", + state=snapshots.SnapshotState.PENDING, + description="description_value", + disk_size_bytes=1611, + region="region_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = snapshots.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_snapshot(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, snapshots.Snapshot) + assert response.id == "id_value" + assert response.project_id == "project_id_value" + assert response.source_job_id == "source_job_id_value" + assert response.state == snapshots.SnapshotState.PENDING + assert response.description == "description_value" + assert response.disk_size_bytes == 1611 + assert response.region == "region_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_snapshot_rest_interceptors(null_interceptor): + transport = transports.SnapshotsV1Beta3RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SnapshotsV1Beta3RestInterceptor(), + ) + client = SnapshotsV1Beta3Client(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SnapshotsV1Beta3RestInterceptor, "post_get_snapshot" + ) as post, mock.patch.object( + transports.SnapshotsV1Beta3RestInterceptor, "pre_get_snapshot" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = snapshots.GetSnapshotRequest.pb(snapshots.GetSnapshotRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = snapshots.Snapshot.to_json(snapshots.Snapshot()) + + request = snapshots.GetSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = snapshots.Snapshot() + + client.get_snapshot( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_snapshot_rest_bad_request( + transport: str = "rest", 
request_type=snapshots.GetSnapshotRequest +): + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project_id": "sample1", + "location": "sample2", + "snapshot_id": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_snapshot(request) + + +def test_get_snapshot_rest_error(): + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + snapshots.DeleteSnapshotRequest, + dict, + ], +) +def test_delete_snapshot_rest(request_type): + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project_id": "sample1", + "location": "sample2", + "snapshot_id": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = snapshots.DeleteSnapshotResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = snapshots.DeleteSnapshotResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_snapshot(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, snapshots.DeleteSnapshotResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_snapshot_rest_interceptors(null_interceptor): + transport = transports.SnapshotsV1Beta3RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SnapshotsV1Beta3RestInterceptor(), + ) + client = SnapshotsV1Beta3Client(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SnapshotsV1Beta3RestInterceptor, "post_delete_snapshot" + ) as post, mock.patch.object( + transports.SnapshotsV1Beta3RestInterceptor, "pre_delete_snapshot" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = snapshots.DeleteSnapshotRequest.pb( + snapshots.DeleteSnapshotRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = snapshots.DeleteSnapshotResponse.to_json( + snapshots.DeleteSnapshotResponse() + ) + + request = snapshots.DeleteSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = 
snapshots.DeleteSnapshotResponse() + + client.delete_snapshot( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_snapshot_rest_bad_request( + transport: str = "rest", request_type=snapshots.DeleteSnapshotRequest +): + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project_id": "sample1", + "location": "sample2", + "snapshot_id": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_snapshot(request) + + +def test_delete_snapshot_rest_error(): + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + snapshots.ListSnapshotsRequest, + dict, + ], +) +def test_list_snapshots_rest(request_type): + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2", "job_id": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = snapshots.ListSnapshotsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = snapshots.ListSnapshotsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_snapshots(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, snapshots.ListSnapshotsResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_snapshots_rest_interceptors(null_interceptor): + transport = transports.SnapshotsV1Beta3RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SnapshotsV1Beta3RestInterceptor(), + ) + client = SnapshotsV1Beta3Client(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SnapshotsV1Beta3RestInterceptor, "post_list_snapshots" + ) as post, mock.patch.object( + transports.SnapshotsV1Beta3RestInterceptor, "pre_list_snapshots" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = snapshots.ListSnapshotsRequest.pb(snapshots.ListSnapshotsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = snapshots.ListSnapshotsResponse.to_json( + snapshots.ListSnapshotsResponse() + ) + + request = snapshots.ListSnapshotsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = snapshots.ListSnapshotsResponse() + 
+ client.list_snapshots( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_snapshots_rest_bad_request( + transport: str = "rest", request_type=snapshots.ListSnapshotsRequest +): + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2", "job_id": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_snapshots(request) + + +def test_list_snapshots_rest_error(): + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.SnapshotsV1Beta3GrpcTransport( @@ -1253,6 +1685,7 @@ def test_transport_get_channel(): [ transports.SnapshotsV1Beta3GrpcTransport, transports.SnapshotsV1Beta3GrpcAsyncIOTransport, + transports.SnapshotsV1Beta3RestTransport, ], ) def test_transport_adc(transport_class): @@ -1267,6 +1700,7 @@ def test_transport_adc(transport_class): "transport_name", [ "grpc", + "rest", ], ) def test_transport_kind(transport_name): @@ -1413,6 +1847,7 @@ def test_snapshots_v1_beta3_transport_auth_adc(transport_class): [ transports.SnapshotsV1Beta3GrpcTransport, transports.SnapshotsV1Beta3GrpcAsyncIOTransport, + transports.SnapshotsV1Beta3RestTransport, ], ) def test_snapshots_v1_beta3_transport_auth_gdch_credentials(transport_class): @@ -1515,11 +1950,23 @@ def test_snapshots_v1_beta3_grpc_transport_client_cert_source_for_mtls(transport ) +def test_snapshots_v1_beta3_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.SnapshotsV1Beta3RestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_snapshots_v1_beta3_host_no_port(transport_name): @@ -1530,7 +1977,11 @@ def test_snapshots_v1_beta3_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) @pytest.mark.parametrize( @@ -1538,6 +1989,7 @@ def test_snapshots_v1_beta3_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def 
test_snapshots_v1_beta3_host_with_port(transport_name): @@ -1548,7 +2000,39 @@ def test_snapshots_v1_beta3_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("dataflow.googleapis.com:8000") + assert client.transport._host == ( + "dataflow.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_snapshots_v1_beta3_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = SnapshotsV1Beta3Client( + credentials=creds1, + transport=transport_name, + ) + client2 = SnapshotsV1Beta3Client( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_snapshot._session + session2 = client2.transport.get_snapshot._session + assert session1 != session2 + session1 = client1.transport.delete_snapshot._session + session2 = client2.transport.delete_snapshot._session + assert session1 != session2 + session1 = client1.transport.list_snapshots._session + session2 = client2.transport.list_snapshots._session + assert session1 != session2 def test_snapshots_v1_beta3_grpc_transport_channel(): @@ -1819,6 +2303,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -1836,6 +2321,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: diff --git a/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py b/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py index 3a8f087..c246da3 100644 --- a/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py +++ b/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py @@ -18,10 +18,12 @@ # try/except added for compatibility with python < 3.8 try: from unittest import 
mock - from unittest.mock import AsyncMock -except ImportError: + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template @@ -31,12 +33,16 @@ from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account +from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore import grpc from grpc.experimental import aio +from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.cloud.dataflow_v1beta3.services.templates_service import ( TemplatesServiceAsyncClient, @@ -96,6 +102,7 @@ def test__get_default_mtls_endpoint(): [ (TemplatesServiceClient, "grpc"), (TemplatesServiceAsyncClient, "grpc_asyncio"), + (TemplatesServiceClient, "rest"), ], ) def test_templates_service_client_from_service_account_info( @@ -111,7 +118,11 @@ def test_templates_service_client_from_service_account_info( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) @pytest.mark.parametrize( @@ -119,6 +130,7 @@ def test_templates_service_client_from_service_account_info( [ (transports.TemplatesServiceGrpcTransport, "grpc"), (transports.TemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.TemplatesServiceRestTransport, "rest"), ], ) def test_templates_service_client_service_account_always_use_jwt( 
@@ -144,6 +156,7 @@ def test_templates_service_client_service_account_always_use_jwt( [ (TemplatesServiceClient, "grpc"), (TemplatesServiceAsyncClient, "grpc_asyncio"), + (TemplatesServiceClient, "rest"), ], ) def test_templates_service_client_from_service_account_file( @@ -166,13 +179,18 @@ def test_templates_service_client_from_service_account_file( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) def test_templates_service_client_get_transport_class(): transport = TemplatesServiceClient.get_transport_class() available_transports = [ transports.TemplatesServiceGrpcTransport, + transports.TemplatesServiceRestTransport, ] assert transport in available_transports @@ -189,6 +207,7 @@ def test_templates_service_client_get_transport_class(): transports.TemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + (TemplatesServiceClient, transports.TemplatesServiceRestTransport, "rest"), ], ) @mock.patch.object( @@ -344,6 +363,18 @@ def test_templates_service_client_client_options( "grpc_asyncio", "false", ), + ( + TemplatesServiceClient, + transports.TemplatesServiceRestTransport, + "rest", + "true", + ), + ( + TemplatesServiceClient, + transports.TemplatesServiceRestTransport, + "rest", + "false", + ), ], ) @mock.patch.object( @@ -543,6 +574,7 @@ def test_templates_service_client_get_mtls_endpoint_and_cert_source(client_class transports.TemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + (TemplatesServiceClient, transports.TemplatesServiceRestTransport, "rest"), ], ) def test_templates_service_client_client_options_scopes( @@ -583,6 +615,12 @@ def test_templates_service_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + ( + TemplatesServiceClient, + 
transports.TemplatesServiceRestTransport, + "rest", + None, + ), ], ) def test_templates_service_client_client_options_credentials_file( @@ -1210,6 +1248,457 @@ async def test_get_template_field_headers_async(): ) in kw["metadata"] +@pytest.mark.parametrize( + "request_type", + [ + templates.CreateJobFromTemplateRequest, + dict, + ], +) +def test_create_job_from_template_rest(request_type): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = jobs.Job( + id="id_value", + project_id="project_id_value", + name="name_value", + type_=environment.JobType.JOB_TYPE_BATCH, + steps_location="steps_location_value", + current_state=jobs.JobState.JOB_STATE_STOPPED, + requested_state=jobs.JobState.JOB_STATE_STOPPED, + replace_job_id="replace_job_id_value", + client_request_id="client_request_id_value", + replaced_by_job_id="replaced_by_job_id_value", + temp_files=["temp_files_value"], + location="location_value", + created_from_snapshot_id="created_from_snapshot_id_value", + satisfies_pzs=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = jobs.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_job_from_template(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, jobs.Job) + assert response.id == "id_value" + assert response.project_id == "project_id_value" + assert response.name == "name_value" + assert response.type_ == environment.JobType.JOB_TYPE_BATCH + assert response.steps_location == "steps_location_value" + assert response.current_state == jobs.JobState.JOB_STATE_STOPPED + assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED + assert response.replace_job_id == "replace_job_id_value" + assert response.client_request_id == "client_request_id_value" + assert response.replaced_by_job_id == "replaced_by_job_id_value" + assert response.temp_files == ["temp_files_value"] + assert response.location == "location_value" + assert response.created_from_snapshot_id == "created_from_snapshot_id_value" + assert response.satisfies_pzs is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_job_from_template_rest_interceptors(null_interceptor): + transport = transports.TemplatesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TemplatesServiceRestInterceptor(), + ) + client = TemplatesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TemplatesServiceRestInterceptor, "post_create_job_from_template" + ) as post, mock.patch.object( + transports.TemplatesServiceRestInterceptor, "pre_create_job_from_template" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = templates.CreateJobFromTemplateRequest.pb( + templates.CreateJobFromTemplateRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + 
req.return_value._content = jobs.Job.to_json(jobs.Job()) + + request = templates.CreateJobFromTemplateRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = jobs.Job() + + client.create_job_from_template( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_job_from_template_rest_bad_request( + transport: str = "rest", request_type=templates.CreateJobFromTemplateRequest +): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_job_from_template(request) + + +def test_create_job_from_template_rest_error(): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + templates.LaunchTemplateRequest, + dict, + ], +) +def test_launch_template_rest(request_type): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2"} + request_init["launch_parameters"] = { + "job_name": "job_name_value", + "parameters": {}, + "environment": { + "num_workers": 1212, + "max_workers": 1202, + "zone": "zone_value", + "service_account_email": 
"service_account_email_value", + "temp_location": "temp_location_value", + "bypass_temp_dir_validation": True, + "machine_type": "machine_type_value", + "additional_experiments": [ + "additional_experiments_value1", + "additional_experiments_value2", + ], + "network": "network_value", + "subnetwork": "subnetwork_value", + "additional_user_labels": {}, + "kms_key_name": "kms_key_name_value", + "ip_configuration": 1, + "worker_region": "worker_region_value", + "worker_zone": "worker_zone_value", + "enable_streaming_engine": True, + }, + "update": True, + "transform_name_mapping": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = templates.LaunchTemplateResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = templates.LaunchTemplateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.launch_template(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, templates.LaunchTemplateResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_launch_template_rest_interceptors(null_interceptor): + transport = transports.TemplatesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TemplatesServiceRestInterceptor(), + ) + client = TemplatesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TemplatesServiceRestInterceptor, "post_launch_template" + ) as post, mock.patch.object( + transports.TemplatesServiceRestInterceptor, "pre_launch_template" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = templates.LaunchTemplateRequest.pb( + templates.LaunchTemplateRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = templates.LaunchTemplateResponse.to_json( + templates.LaunchTemplateResponse() + ) + + request = templates.LaunchTemplateRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = templates.LaunchTemplateResponse() + + client.launch_template( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_launch_template_rest_bad_request( + transport: str = "rest", request_type=templates.LaunchTemplateRequest +): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": 
"sample1", "location": "sample2"} + request_init["launch_parameters"] = { + "job_name": "job_name_value", + "parameters": {}, + "environment": { + "num_workers": 1212, + "max_workers": 1202, + "zone": "zone_value", + "service_account_email": "service_account_email_value", + "temp_location": "temp_location_value", + "bypass_temp_dir_validation": True, + "machine_type": "machine_type_value", + "additional_experiments": [ + "additional_experiments_value1", + "additional_experiments_value2", + ], + "network": "network_value", + "subnetwork": "subnetwork_value", + "additional_user_labels": {}, + "kms_key_name": "kms_key_name_value", + "ip_configuration": 1, + "worker_region": "worker_region_value", + "worker_zone": "worker_zone_value", + "enable_streaming_engine": True, + }, + "update": True, + "transform_name_mapping": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.launch_template(request) + + +def test_launch_template_rest_error(): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + templates.GetTemplateRequest, + dict, + ], +) +def test_get_template_rest(request_type): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = templates.GetTemplateResponse( + template_type=templates.GetTemplateResponse.TemplateType.LEGACY, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = templates.GetTemplateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_template(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, templates.GetTemplateResponse) + assert response.template_type == templates.GetTemplateResponse.TemplateType.LEGACY + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_template_rest_interceptors(null_interceptor): + transport = transports.TemplatesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TemplatesServiceRestInterceptor(), + ) + client = TemplatesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TemplatesServiceRestInterceptor, "post_get_template" + ) as post, mock.patch.object( + transports.TemplatesServiceRestInterceptor, "pre_get_template" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = templates.GetTemplateRequest.pb(templates.GetTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = 
templates.GetTemplateResponse.to_json( + templates.GetTemplateResponse() + ) + + request = templates.GetTemplateRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = templates.GetTemplateResponse() + + client.get_template( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_template_rest_bad_request( + transport: str = "rest", request_type=templates.GetTemplateRequest +): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_template(request) + + +def test_get_template_rest_error(): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.TemplatesServiceGrpcTransport( @@ -1291,6 +1780,7 @@ def test_transport_get_channel(): [ transports.TemplatesServiceGrpcTransport, transports.TemplatesServiceGrpcAsyncIOTransport, + transports.TemplatesServiceRestTransport, ], ) def test_transport_adc(transport_class): @@ -1305,6 +1795,7 @@ def test_transport_adc(transport_class): "transport_name", [ "grpc", + "rest", ], ) def test_transport_kind(transport_name): @@ -1451,6 +1942,7 @@ def test_templates_service_transport_auth_adc(transport_class): [ transports.TemplatesServiceGrpcTransport, transports.TemplatesServiceGrpcAsyncIOTransport, + transports.TemplatesServiceRestTransport, ], ) def test_templates_service_transport_auth_gdch_credentials(transport_class): @@ -1553,11 +2045,23 @@ def test_templates_service_grpc_transport_client_cert_source_for_mtls(transport_ ) +def test_templates_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.TemplatesServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_templates_service_host_no_port(transport_name): @@ -1568,7 +2072,11 @@ def test_templates_service_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) @pytest.mark.parametrize( @@ -1576,6 +2084,7 @@ def test_templates_service_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def 
test_templates_service_host_with_port(transport_name): @@ -1586,7 +2095,39 @@ def test_templates_service_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("dataflow.googleapis.com:8000") + assert client.transport._host == ( + "dataflow.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_templates_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = TemplatesServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = TemplatesServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_job_from_template._session + session2 = client2.transport.create_job_from_template._session + assert session1 != session2 + session1 = client1.transport.launch_template._session + session2 = client2.transport.launch_template._session + assert session1 != session2 + session1 = client1.transport.get_template._session + session2 = client2.transport.get_template._session + assert session1 != session2 def test_templates_service_grpc_transport_channel(): @@ -1857,6 +2398,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -1874,6 +2416,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: