diff --git a/.coveragerc b/.coveragerc
index 9a4d485..2e41c36 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -10,8 +10,3 @@ exclude_lines =
     pragma: NO COVER
     # Ignore debug-only repr
     def __repr__
-    # Ignore pkg_resources exceptions.
-    # This is added at the module level as a safeguard for if someone
-    # generates the code and tries to run it without pip installing. This
-    # makes it virtually impossible to test properly.
-    except pkg_resources.DistributionNotFound
diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index 3815c98..bb21147 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -13,4 +13,4 @@
 # limitations under the License.
 docker:
   image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
-  digest: sha256:7a40313731a7cb1454eef6b33d3446ebb121836738dc3ab3d2d3ded5268c35b6
+  digest: sha256:3abfa0f1886adaf0b83f07cb117b24a639ea1cb9cffe56d43280b977033563eb
diff --git a/.github/release-please.yml b/.github/release-please.yml
index 466597e..528b274 100644
--- a/.github/release-please.yml
+++ b/.github/release-please.yml
@@ -1,2 +1,3 @@
 releaseType: python
 handleGHRelease: true
+manifest: true
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 7092a13..e97d89e 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -12,7 +12,7 @@ jobs:
       - name: Setup Python
         uses: actions/setup-python@v4
         with:
-          python-version: "3.10"
+          python-version: "3.9"
       - name: Install nox
         run: |
           python -m pip install --upgrade setuptools pip wheel
@@ -28,7 +28,7 @@ jobs:
       - name: Setup Python
         uses: actions/setup-python@v4
         with:
-          python-version: "3.10"
+          python-version: "3.9"
       - name: Install nox
         run: |
           python -m pip install --upgrade setuptools pip wheel
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index d2aee5b..16d5a9e 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -12,7 +12,7 @@ jobs:
       - name: Setup Python
         uses: actions/setup-python@v4
         with:
-          python-version: "3.10"
+          python-version: "3.8"
       - name: Install nox
         run: |
           python -m pip install --upgrade setuptools pip wheel
diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml
index 87ade4d..23000c0 100644
--- a/.github/workflows/unittest.yml
+++ b/.github/workflows/unittest.yml
@@ -41,7 +41,7 @@ jobs:
       - name: Setup Python
         uses: actions/setup-python@v4
         with:
-          python-version: "3.10"
+          python-version: "3.8"
       - name: Install coverage
         run: |
           python -m pip install --upgrade setuptools pip wheel
diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile
index 238b87b..f8137d0 100644
--- a/.kokoro/docker/docs/Dockerfile
+++ b/.kokoro/docker/docs/Dockerfile
@@ -60,16 +60,16 @@ RUN apt-get update \
     && rm -rf /var/lib/apt/lists/* \
     && rm -f /var/cache/apt/archives/*.deb

-###################### Install python 3.8.11
+###################### Install python 3.9.13

-# Download python 3.8.11
-RUN wget https://www.python.org/ftp/python/3.8.11/Python-3.8.11.tgz
+# Download python 3.9.13
+RUN wget https://www.python.org/ftp/python/3.9.13/Python-3.9.13.tgz

 # Extract files
-RUN tar -xvf Python-3.8.11.tgz
+RUN tar -xvf Python-3.9.13.tgz

-# Install python 3.8.11
-RUN ./Python-3.8.11/configure --enable-optimizations
+# Install python 3.9.13
+RUN ./Python-3.9.13/configure --enable-optimizations
 RUN make altinstall

 ###################### Install pip
diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in
index 7718391..cbd7e77 100644
--- a/.kokoro/requirements.in
+++ b/.kokoro/requirements.in
@@ -5,4 +5,6 @@
typing-extensions twine wheel setuptools -nox \ No newline at end of file +nox +charset-normalizer<3 +click<8.1.0 diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index d15994b..9c1b9be 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.2.0 \ --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db # via google-auth -certifi==2022.6.15 \ - --hash=sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d \ - --hash=sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412 +certifi==2022.9.24 \ + --hash=sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14 \ + --hash=sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382 # via requests cffi==1.15.1 \ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ @@ -93,11 +93,14 @@ cffi==1.15.1 \ charset-normalizer==2.1.1 \ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f - # via requests + # via + # -r requirements.in + # requests click==8.0.4 \ --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb # via + # -r requirements.in # gcp-docuploader # gcp-releasetool colorlog==6.7.0 \ @@ -110,29 +113,33 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==37.0.4 \ - --hash=sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59 \ - --hash=sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596 \ - --hash=sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3 \ - --hash=sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5 \ - --hash=sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab \ - --hash=sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884 \ - --hash=sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82 \ - --hash=sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b \ - --hash=sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441 \ - --hash=sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa \ - --hash=sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d \ - --hash=sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b \ - --hash=sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a \ - --hash=sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6 \ - --hash=sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157 \ - --hash=sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280 \ - --hash=sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282 \ - --hash=sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67 \ - --hash=sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8 \ - --hash=sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046 \ - --hash=sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327 \ - 
--hash=sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9 +cryptography==38.0.3 \ + --hash=sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d \ + --hash=sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd \ + --hash=sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146 \ + --hash=sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7 \ + --hash=sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436 \ + --hash=sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0 \ + --hash=sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828 \ + --hash=sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b \ + --hash=sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55 \ + --hash=sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36 \ + --hash=sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50 \ + --hash=sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2 \ + --hash=sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a \ + --hash=sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8 \ + --hash=sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0 \ + --hash=sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548 \ + --hash=sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320 \ + --hash=sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748 \ + --hash=sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249 \ + --hash=sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959 \ + --hash=sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f \ + --hash=sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0 \ + --hash=sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd \ + --hash=sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220 \ + --hash=sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c \ + --hash=sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722 # via # gcp-releasetool # secretstorage @@ -148,23 +155,23 @@ filelock==3.8.0 \ --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 # via virtualenv -gcp-docuploader==0.6.3 \ - --hash=sha256:ba8c9d76b3bbac54b0311c503a373b00edc2dc02d6d54ea9507045adb8e870f7 \ - --hash=sha256:c0f5aaa82ce1854a386197e4e359b120ad6d4e57ae2c812fce42219a3288026b +gcp-docuploader==0.6.4 \ + --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ + --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf # via -r requirements.in -gcp-releasetool==1.8.7 \ - --hash=sha256:3d2a67c9db39322194afb3b427e9cb0476ce8f2a04033695f0aeb63979fc2b37 \ - --hash=sha256:5e4d28f66e90780d77f3ecf1e9155852b0c3b13cbccb08ab07e66b2357c8da8d +gcp-releasetool==1.10.0 \ + --hash=sha256:72a38ca91b59c24f7e699e9227c90cbe4dd71b789383cb0164b088abae294c83 \ + --hash=sha256:8c7c99320208383d4bb2b808c6880eb7a81424afe7cdba3c8d84b25f4f0e097d # via -r requirements.in -google-api-core==2.8.2 \ - --hash=sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc \ - --hash=sha256:93c6a91ccac79079ac6bbf8b74ee75db970cc899278b97d53bc012f35908cf50 +google-api-core==2.10.2 \ + 
--hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ + --hash=sha256:34f24bd1d5f72a8c4519773d99ca6bf080a6c4e041b4e9f024fe230191dda62e # via # google-cloud-core # google-cloud-storage -google-auth==2.11.0 \ - --hash=sha256:be62acaae38d0049c21ca90f27a23847245c9f161ff54ede13af2cb6afecbac9 \ - --hash=sha256:ed65ecf9f681832298e29328e1ef0a3676e3732b2e56f41532d45f70a22de0fb +google-auth==2.14.1 \ + --hash=sha256:ccaa901f31ad5cbb562615eb8b664b3dd0bf5404a67618e642307f00613eda4d \ + --hash=sha256:f5d8701633bebc12e0deea4df8abd8aff31c28b355360597f7f2ee60f2e4d016 # via # gcp-releasetool # google-api-core @@ -174,76 +181,102 @@ google-cloud-core==2.3.2 \ --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a # via google-cloud-storage -google-cloud-storage==2.5.0 \ - --hash=sha256:19a26c66c317ce542cea0830b7e787e8dac2588b6bfa4d3fd3b871ba16305ab0 \ - --hash=sha256:382f34b91de2212e3c2e7b40ec079d27ee2e3dbbae99b75b1bcd8c63063ce235 +google-cloud-storage==2.6.0 \ + --hash=sha256:104ca28ae61243b637f2f01455cc8a05e8f15a2a18ced96cb587241cdd3820f5 \ + --hash=sha256:4ad0415ff61abdd8bb2ae81c1f8f7ec7d91a1011613f2db87c614c550f97bfe9 # via gcp-docuploader -google-crc32c==1.3.0 \ - --hash=sha256:04e7c220798a72fd0f08242bc8d7a05986b2a08a0573396187fd32c1dcdd58b3 \ - --hash=sha256:05340b60bf05b574159e9bd940152a47d38af3fb43803ffe71f11d704b7696a6 \ - --hash=sha256:12674a4c3b56b706153a358eaa1018c4137a5a04635b92b4652440d3d7386206 \ - --hash=sha256:127f9cc3ac41b6a859bd9dc4321097b1a4f6aa7fdf71b4f9227b9e3ebffb4422 \ - --hash=sha256:13af315c3a0eec8bb8b8d80b8b128cb3fcd17d7e4edafc39647846345a3f003a \ - --hash=sha256:1926fd8de0acb9d15ee757175ce7242e235482a783cd4ec711cc999fc103c24e \ - --hash=sha256:226f2f9b8e128a6ca6a9af9b9e8384f7b53a801907425c9a292553a3a7218ce0 \ - --hash=sha256:276de6273eb074a35bc598f8efbc00c7869c5cf2e29c90748fccc8c898c244df \ - --hash=sha256:318f73f5484b5671f0c7f5f63741ab020a599504ed81d209b5c7129ee4667407 \ - --hash=sha256:3bbce1be3687bbfebe29abdb7631b83e6b25da3f4e1856a1611eb21854b689ea \ - --hash=sha256:42ae4781333e331a1743445931b08ebdad73e188fd554259e772556fc4937c48 \ - --hash=sha256:58be56ae0529c664cc04a9c76e68bb92b091e0194d6e3c50bea7e0f266f73713 \ - --hash=sha256:5da2c81575cc3ccf05d9830f9e8d3c70954819ca9a63828210498c0774fda1a3 \ - --hash=sha256:6311853aa2bba4064d0c28ca54e7b50c4d48e3de04f6770f6c60ebda1e975267 \ - --hash=sha256:650e2917660e696041ab3dcd7abac160b4121cd9a484c08406f24c5964099829 \ - --hash=sha256:6a4db36f9721fdf391646685ecffa404eb986cbe007a3289499020daf72e88a2 \ - --hash=sha256:779cbf1ce375b96111db98fca913c1f5ec11b1d870e529b1dc7354b2681a8c3a \ - --hash=sha256:7f6fe42536d9dcd3e2ffb9d3053f5d05221ae3bbcefbe472bdf2c71c793e3183 \ - --hash=sha256:891f712ce54e0d631370e1f4997b3f182f3368179198efc30d477c75d1f44942 \ - --hash=sha256:95c68a4b9b7828ba0428f8f7e3109c5d476ca44996ed9a5f8aac6269296e2d59 \ - --hash=sha256:96a8918a78d5d64e07c8ea4ed2bc44354e3f93f46a4866a40e8db934e4c0d74b \ - --hash=sha256:9c3cf890c3c0ecfe1510a452a165431b5831e24160c5fcf2071f0f85ca5a47cd \ - --hash=sha256:9f58099ad7affc0754ae42e6d87443299f15d739b0ce03c76f515153a5cda06c \ - --hash=sha256:a0b9e622c3b2b8d0ce32f77eba617ab0d6768b82836391e4f8f9e2074582bf02 \ - --hash=sha256:a7f9cbea4245ee36190f85fe1814e2d7b1e5f2186381b082f5d59f99b7f11328 \ - --hash=sha256:bab4aebd525218bab4ee615786c4581952eadc16b1ff031813a2fd51f0cc7b08 \ - --hash=sha256:c124b8c8779bf2d35d9b721e52d4adb41c9bfbde45e6a3f25f0820caa9aba73f \ - 
--hash=sha256:c9da0a39b53d2fab3e5467329ed50e951eb91386e9d0d5b12daf593973c3b168 \ - --hash=sha256:ca60076c388728d3b6ac3846842474f4250c91efbfe5afa872d3ffd69dd4b318 \ - --hash=sha256:cb6994fff247987c66a8a4e550ef374671c2b82e3c0d2115e689d21e511a652d \ - --hash=sha256:d1c1d6236feab51200272d79b3d3e0f12cf2cbb12b208c835b175a21efdb0a73 \ - --hash=sha256:dd7760a88a8d3d705ff562aa93f8445ead54f58fd482e4f9e2bafb7e177375d4 \ - --hash=sha256:dda4d8a3bb0b50f540f6ff4b6033f3a74e8bf0bd5320b70fab2c03e512a62812 \ - --hash=sha256:e0f1ff55dde0ebcfbef027edc21f71c205845585fffe30d4ec4979416613e9b3 \ - --hash=sha256:e7a539b9be7b9c00f11ef16b55486141bc2cdb0c54762f84e3c6fc091917436d \ - --hash=sha256:eb0b14523758e37802f27b7f8cd973f5f3d33be7613952c0df904b68c4842f0e \ - --hash=sha256:ed447680ff21c14aaceb6a9f99a5f639f583ccfe4ce1a5e1d48eb41c3d6b3217 \ - --hash=sha256:f52a4ad2568314ee713715b1e2d79ab55fab11e8b304fd1462ff5cccf4264b3e \ - --hash=sha256:fbd60c6aaa07c31d7754edbc2334aef50601b7f1ada67a96eb1eb57c7c72378f \ - --hash=sha256:fc28e0db232c62ca0c3600884933178f0825c99be4474cdd645e378a10588125 \ - --hash=sha256:fe31de3002e7b08eb20823b3735b97c86c5926dd0581c7710a680b418a8709d4 \ - --hash=sha256:fec221a051150eeddfdfcff162e6db92c65ecf46cb0f7bb1bf812a1520ec026b \ - --hash=sha256:ff71073ebf0e42258a42a0b34f2c09ec384977e7f6808999102eedd5b49920e3 +google-crc32c==1.5.0 \ + --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ + --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \ + --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \ + --hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \ + --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \ + --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \ + --hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \ + --hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \ + --hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \ + --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \ + --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \ + --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \ + --hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \ + --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \ + --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \ + --hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \ + --hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \ + --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \ + --hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \ + --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \ + --hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \ + --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \ + --hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \ + --hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \ + --hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \ + --hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \ + 
--hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \ + --hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \ + --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \ + --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \ + --hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \ + --hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \ + --hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \ + --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \ + --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \ + --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \ + --hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \ + --hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \ + --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \ + --hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \ + --hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \ + --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \ + --hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \ + --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \ + --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \ + --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \ + --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \ + --hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \ + --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \ + --hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \ + --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \ + --hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \ + --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \ + --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \ + --hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \ + --hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \ + --hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \ + --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \ + --hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \ + --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \ + --hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \ + --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \ + --hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \ + --hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \ + --hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \ + --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ + --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ + --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 # via google-resumable-media -google-resumable-media==2.3.3 \ - 
--hash=sha256:27c52620bd364d1c8116eaac4ea2afcbfb81ae9139fb3199652fcac1724bfb6c \ - --hash=sha256:5b52774ea7a829a8cdaa8bd2d4c3d4bc660c91b30857ab2668d0eb830f4ea8c5 +google-resumable-media==2.4.0 \ + --hash=sha256:2aa004c16d295c8f6c33b2b4788ba59d366677c0a25ae7382436cb30f776deaa \ + --hash=sha256:8d5518502f92b9ecc84ac46779bd4f09694ecb3ba38a3e7ca737a86d15cbca1f # via google-cloud-storage -googleapis-common-protos==1.56.4 \ - --hash=sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394 \ - --hash=sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417 +googleapis-common-protos==1.57.0 \ + --hash=sha256:27a849d6205838fb6cc3c1c21cb9800707a661bb21c6ce7fb13e99eb1f8a0c46 \ + --hash=sha256:a9f4a1d7f6d9809657b7f1316a1aa527f6664891531bcfcc13b6696e685f443c # via google-api-core -idna==3.3 \ - --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \ - --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d +idna==3.4 \ + --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ + --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 # via requests -importlib-metadata==4.12.0 \ - --hash=sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670 \ - --hash=sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23 +importlib-metadata==5.0.0 \ + --hash=sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab \ + --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43 # via # -r requirements.in + # keyring # twine -jaraco-classes==3.2.2 \ - --hash=sha256:6745f113b0b588239ceb49532aa09c3ebb947433ce311ef2f8e3ad64ebb74594 \ - --hash=sha256:e6ef6fd3fcf4579a7a019d87d1e56a883f4e4c35cfe925f86731abc58804e647 +jaraco-classes==3.2.3 \ + --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ + --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -255,9 +288,9 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via gcp-releasetool -keyring==23.9.0 \ - --hash=sha256:4c32a31174faaee48f43a7e2c7e9c3216ec5e95acf22a2bebfb4a1d05056ee44 \ - --hash=sha256:98f060ec95ada2ab910c195a2d4317be6ef87936a766b239c46aa3c7aac4f0db +keyring==23.11.0 \ + --hash=sha256:3dd30011d555f1345dec2c262f0153f2f0ca6bca041fb1dc4588349bb4c0ac1e \ + --hash=sha256:ad192263e2cdd5f12875dedc2da13534359a7e760e77f8d04b50968a821c2361 # via # gcp-releasetool # twine @@ -303,9 +336,9 @@ markupsafe==2.1.1 \ --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 # via jinja2 -more-itertools==8.14.0 \ - --hash=sha256:1bc4f91ee5b1b31ac7ceacc17c09befe6a40a503907baf9c839c229b5095cfd2 \ - --hash=sha256:c09443cd3d5438b8dafccd867a6bc1cb0894389e90cb53d227456b0b0bccb750 +more-itertools==9.0.0 \ + --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ + --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab # via jaraco-classes nox==2022.8.7 \ --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ @@ -321,34 +354,33 @@ pkginfo==1.8.3 \ --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ 
--hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c # via twine -platformdirs==2.5.2 \ - --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ - --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 +platformdirs==2.5.4 \ + --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \ + --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10 # via virtualenv -protobuf==3.20.2 \ - --hash=sha256:03d76b7bd42ac4a6e109742a4edf81ffe26ffd87c5993126d894fe48a120396a \ - --hash=sha256:09e25909c4297d71d97612f04f41cea8fa8510096864f2835ad2f3b3df5a5559 \ - --hash=sha256:18e34a10ae10d458b027d7638a599c964b030c1739ebd035a1dfc0e22baa3bfe \ - --hash=sha256:291fb4307094bf5ccc29f424b42268640e00d5240bf0d9b86bf3079f7576474d \ - --hash=sha256:2c0b040d0b5d5d207936ca2d02f00f765906622c07d3fa19c23a16a8ca71873f \ - --hash=sha256:384164994727f274cc34b8abd41a9e7e0562801361ee77437099ff6dfedd024b \ - --hash=sha256:3cb608e5a0eb61b8e00fe641d9f0282cd0eedb603be372f91f163cbfbca0ded0 \ - --hash=sha256:5d9402bf27d11e37801d1743eada54372f986a372ec9679673bfcc5c60441151 \ - --hash=sha256:712dca319eee507a1e7df3591e639a2b112a2f4a62d40fe7832a16fd19151750 \ - --hash=sha256:7a5037af4e76c975b88c3becdf53922b5ffa3f2cddf657574a4920a3b33b80f3 \ - --hash=sha256:8228e56a865c27163d5d1d1771d94b98194aa6917bcfb6ce139cbfa8e3c27334 \ - --hash=sha256:84a1544252a933ef07bb0b5ef13afe7c36232a774affa673fc3636f7cee1db6c \ - --hash=sha256:84fe5953b18a383fd4495d375fe16e1e55e0a3afe7b4f7b4d01a3a0649fcda9d \ - --hash=sha256:9c673c8bfdf52f903081816b9e0e612186684f4eb4c17eeb729133022d6032e3 \ - --hash=sha256:9f876a69ca55aed879b43c295a328970306e8e80a263ec91cf6e9189243c613b \ - --hash=sha256:a9e5ae5a8e8985c67e8944c23035a0dff2c26b0f5070b2f55b217a1c33bbe8b1 \ - --hash=sha256:b4fdb29c5a7406e3f7ef176b2a7079baa68b5b854f364c21abe327bbeec01cdb \ - --hash=sha256:c184485e0dfba4dfd451c3bd348c2e685d6523543a0f91b9fd4ae90eb09e8422 \ - --hash=sha256:c9cdf251c582c16fd6a9f5e95836c90828d51b0069ad22f463761d27c6c19019 \ - --hash=sha256:e39cf61bb8582bda88cdfebc0db163b774e7e03364bbf9ce1ead13863e81e359 \ - --hash=sha256:e8fbc522303e09036c752a0afcc5c0603e917222d8bedc02813fd73b4b4ed804 \ - --hash=sha256:f34464ab1207114e73bba0794d1257c150a2b89b7a9faf504e00af7c9fd58978 \ - --hash=sha256:f52dabc96ca99ebd2169dadbe018824ebda08a795c7684a0b7d203a290f3adb0 +protobuf==3.20.3 \ + --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ + --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ + --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \ + --hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \ + --hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \ + --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \ + --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \ + --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \ + --hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \ + --hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \ + --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \ + --hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \ + --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \ + 
--hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \ + --hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \ + --hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \ + --hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \ + --hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \ + --hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \ + --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ + --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ + --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee # via # gcp-docuploader # gcp-releasetool @@ -377,9 +409,9 @@ pygments==2.13.0 \ # via # readme-renderer # rich -pyjwt==2.4.0 \ - --hash=sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf \ - --hash=sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba +pyjwt==2.6.0 \ + --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \ + --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14 # via gcp-releasetool pyparsing==3.0.9 \ --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ @@ -392,9 +424,9 @@ python-dateutil==2.8.2 \ --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 # via gcp-releasetool -readme-renderer==37.0 \ - --hash=sha256:07b7ea234e03e58f77cc222e206e6abb8f4c0435becce5104794ee591f9301c5 \ - --hash=sha256:9fa416704703e509eeb900696751c908ddeb2011319d93700d8f18baff887a69 +readme-renderer==37.3 \ + --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ + --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 # via twine requests==2.28.1 \ --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ @@ -405,17 +437,17 @@ requests==2.28.1 \ # google-cloud-storage # requests-toolbelt # twine -requests-toolbelt==0.9.1 \ - --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \ - --hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0 +requests-toolbelt==0.10.1 \ + --hash=sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7 \ + --hash=sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d # via twine rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==12.5.1 \ - --hash=sha256:2eb4e6894cde1e017976d2975ac210ef515d7548bc595ba20e195fb9628acdeb \ - --hash=sha256:63a5c5ce3673d3d5fbbf23cd87e11ab84b6b451436f1b7f19ec54b6bc36ed7ca +rich==12.6.0 \ + --hash=sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e \ + --hash=sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0 # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -437,9 +469,9 @@ twine==4.0.1 \ --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 # via -r requirements.in -typing-extensions==4.3.0 \ - --hash=sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02 \ - 
--hash=sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6 +typing-extensions==4.4.0 \ + --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ + --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in urllib3==1.26.12 \ --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ @@ -447,25 +479,25 @@ urllib3==1.26.12 \ # via # requests # twine -virtualenv==20.16.4 \ - --hash=sha256:014f766e4134d0008dcaa1f95bafa0fb0f575795d07cae50b1bee514185d6782 \ - --hash=sha256:035ed57acce4ac35c82c9d8802202b0e71adac011a511ff650cbcf9635006a22 +virtualenv==20.16.7 \ + --hash=sha256:8691e3ff9387f743e00f6bb20f70121f5e4f596cae754531f2b3b3a1b1ac696e \ + --hash=sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29 # via nox webencodings==0.5.1 \ --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 # via bleach -wheel==0.37.1 \ - --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ - --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 +wheel==0.38.4 \ + --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ + --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 # via -r requirements.in -zipp==3.8.1 \ - --hash=sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2 \ - --hash=sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009 +zipp==3.10.0 \ + --hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \ + --hash=sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==65.2.0 \ - --hash=sha256:7f4bc85450898a09f76ebf28b72fa25bc7111f6c7d665d514a60bba9c75ef2a9 \ - --hash=sha256:a3ca5857c89f82f5c9410e8508cb32f4872a3bafd4aa7ae122a24ca33bccc750 +setuptools==65.5.1 \ + --hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \ + --hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f # via -r requirements.in diff --git a/.release-please-manifest.json b/.release-please-manifest.json new file mode 100644 index 0000000..f1c1e58 --- /dev/null +++ b/.release-please-manifest.json @@ -0,0 +1,3 @@ +{ + ".": "0.5.0" +} diff --git a/CHANGELOG.md b/CHANGELOG.md index 7a57a13..b8c5580 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,30 @@ # Changelog +## [0.5.0](https://github.com/googleapis/python-batch/compare/v0.4.1...v0.5.0) (2022-12-07) + + +### Features + +* add support for `google.cloud.batch.__version__` ([2f6bdca](https://github.com/googleapis/python-batch/commit/2f6bdcace12b0401e239b08e83a7cb381005d275)) +* Add typing to proto.Message based class attributes ([2f6bdca](https://github.com/googleapis/python-batch/commit/2f6bdcace12b0401e239b08e83a7cb381005d275)) +* Adds named reservation to InstancePolicy ([9414457](https://github.com/googleapis/python-batch/commit/9414457a16f80cb546b19db1d8f4260883e6f21f)) + + +### Bug Fixes + +* Add dict typing for client_options ([2f6bdca](https://github.com/googleapis/python-batch/commit/2f6bdcace12b0401e239b08e83a7cb381005d275)) +* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([1b70819](https://github.com/googleapis/python-batch/commit/1b708191b9dc978930ac38870a994777979f84bf)) +* Drop usage of 
pkg_resources ([1b70819](https://github.com/googleapis/python-batch/commit/1b708191b9dc978930ac38870a994777979f84bf))
+* Fix timeout default values ([1b70819](https://github.com/googleapis/python-batch/commit/1b708191b9dc978930ac38870a994777979f84bf))
+
+
+### Documentation
+
+* Remove "not yet implemented" for Accelerator & Refine Volume API docs ([9414457](https://github.com/googleapis/python-batch/commit/9414457a16f80cb546b19db1d8f4260883e6f21f))
+* **samples:** Snippetgen handling of repeated enum field ([2f6bdca](https://github.com/googleapis/python-batch/commit/2f6bdcace12b0401e239b08e83a7cb381005d275))
+* **samples:** Snippetgen should call await on the operation coroutine before calling result ([1b70819](https://github.com/googleapis/python-batch/commit/1b708191b9dc978930ac38870a994777979f84bf))
+* update the job id format requirement ([9414457](https://github.com/googleapis/python-batch/commit/9414457a16f80cb546b19db1d8f4260883e6f21f))
+
 ## [0.4.1](https://github.com/googleapis/python-batch/compare/v0.4.0...v0.4.1) (2022-10-27)
diff --git a/docs/batch_v1/types.rst b/docs/batch_v1/types.rst
index b6782f0..e56f8a2 100644
--- a/docs/batch_v1/types.rst
+++ b/docs/batch_v1/types.rst
@@ -3,5 +3,4 @@ Types for Google Cloud Batch v1 API

 .. automodule:: google.cloud.batch_v1.types
     :members:
-    :undoc-members:
     :show-inheritance:
diff --git a/docs/batch_v1alpha/types.rst b/docs/batch_v1alpha/types.rst
index aea058d..c93a1c4 100644
--- a/docs/batch_v1alpha/types.rst
+++ b/docs/batch_v1alpha/types.rst
@@ -3,5 +3,4 @@ Types for Google Cloud Batch v1alpha API

 .. automodule:: google.cloud.batch_v1alpha.types
     :members:
-    :undoc-members:
     :show-inheritance:
diff --git a/docs/conf.py b/docs/conf.py
index d283f4a..ff986c7 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -24,9 +24,9 @@
 # All configuration values have a default; values that are commented out
 # serve to show the default.

-import sys
 import os
 import shlex
+import sys

 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
diff --git a/google/cloud/batch/__init__.py b/google/cloud/batch/__init__.py
new file mode 100644
index 0000000..9b2defc
--- /dev/null
+++ b/google/cloud/batch/__init__.py
@@ -0,0 +1,89 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +from google.cloud.batch import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.batch_v1.services.batch_service.async_client import ( + BatchServiceAsyncClient, +) +from google.cloud.batch_v1.services.batch_service.client import BatchServiceClient +from google.cloud.batch_v1.types.batch import ( + CreateJobRequest, + DeleteJobRequest, + GetJobRequest, + GetTaskRequest, + ListJobsRequest, + ListJobsResponse, + ListTasksRequest, + ListTasksResponse, + OperationMetadata, +) +from google.cloud.batch_v1.types.job import ( + AllocationPolicy, + Job, + JobNotification, + JobStatus, + LogsPolicy, + ServiceAccount, + TaskGroup, +) +from google.cloud.batch_v1.types.task import ( + ComputeResource, + Environment, + LifecyclePolicy, + Runnable, + StatusEvent, + Task, + TaskExecution, + TaskSpec, + TaskStatus, +) +from google.cloud.batch_v1.types.volume import GCS, NFS, Volume + +__all__ = ( + "BatchServiceClient", + "BatchServiceAsyncClient", + "CreateJobRequest", + "DeleteJobRequest", + "GetJobRequest", + "GetTaskRequest", + "ListJobsRequest", + "ListJobsResponse", + "ListTasksRequest", + "ListTasksResponse", + "OperationMetadata", + "AllocationPolicy", + "Job", + "JobNotification", + "JobStatus", + "LogsPolicy", + "ServiceAccount", + "TaskGroup", + "ComputeResource", + "Environment", + "LifecyclePolicy", + "Runnable", + "StatusEvent", + "Task", + "TaskExecution", + "TaskSpec", + "TaskStatus", + "GCS", + "NFS", + "Volume", +) diff --git a/google/cloud/batch/gapic_version.py b/google/cloud/batch/gapic_version.py new file mode 100644 index 0000000..371eb6b --- /dev/null +++ b/google/cloud/batch/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.5.0" # {x-release-please-version} diff --git a/google/cloud/batch/py.typed b/google/cloud/batch/py.typed new file mode 100644 index 0000000..32c66c8 --- /dev/null +++ b/google/cloud/batch/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-batch package uses inline types. diff --git a/google/cloud/batch_v1/__init__.py b/google/cloud/batch_v1/__init__.py index 203672c..566e33b 100644 --- a/google/cloud/batch_v1/__init__.py +++ b/google/cloud/batch_v1/__init__.py @@ -13,38 +13,44 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from google.cloud.batch import gapic_version as package_version -from .services.batch_service import BatchServiceClient -from .services.batch_service import BatchServiceAsyncClient +__version__ = package_version.__version__ -from .types.batch import CreateJobRequest -from .types.batch import DeleteJobRequest -from .types.batch import GetJobRequest -from .types.batch import GetTaskRequest -from .types.batch import ListJobsRequest -from .types.batch import ListJobsResponse -from .types.batch import ListTasksRequest -from .types.batch import ListTasksResponse -from .types.batch import OperationMetadata -from .types.job import AllocationPolicy -from .types.job import Job -from .types.job import JobNotification -from .types.job import JobStatus -from .types.job import LogsPolicy -from .types.job import ServiceAccount -from .types.job import TaskGroup -from .types.task import ComputeResource -from .types.task import Environment -from .types.task import LifecyclePolicy -from .types.task import Runnable -from .types.task import StatusEvent -from .types.task import Task -from .types.task import TaskExecution -from .types.task import TaskSpec -from .types.task import TaskStatus -from .types.volume import GCS -from .types.volume import NFS -from .types.volume import Volume + +from .services.batch_service import BatchServiceAsyncClient, BatchServiceClient +from .types.batch import ( + CreateJobRequest, + DeleteJobRequest, + GetJobRequest, + GetTaskRequest, + ListJobsRequest, + ListJobsResponse, + ListTasksRequest, + ListTasksResponse, + OperationMetadata, +) +from .types.job import ( + AllocationPolicy, + Job, + JobNotification, + JobStatus, + LogsPolicy, + ServiceAccount, + TaskGroup, +) +from .types.task import ( + ComputeResource, + Environment, + LifecyclePolicy, + Runnable, + StatusEvent, + Task, + TaskExecution, + TaskSpec, + TaskStatus, +) +from .types.volume import GCS, NFS, Volume __all__ = ( "BatchServiceAsyncClient", diff --git a/google/cloud/batch_v1/gapic_version.py b/google/cloud/batch_v1/gapic_version.py new file mode 100644 index 0000000..371eb6b --- /dev/null +++ b/google/cloud/batch_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.5.0" # {x-release-please-version} diff --git a/google/cloud/batch_v1/services/batch_service/__init__.py b/google/cloud/batch_v1/services/batch_service/__init__.py index 68fe726..450c3c1 100644 --- a/google/cloud/batch_v1/services/batch_service/__init__.py +++ b/google/cloud/batch_v1/services/batch_service/__init__.py @@ -13,8 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .client import BatchServiceClient from .async_client import BatchServiceAsyncClient +from .client import BatchServiceClient __all__ = ( "BatchServiceClient", diff --git a/google/cloud/batch_v1/services/batch_service/async_client.py b/google/cloud/batch_v1/services/batch_service/async_client.py index 8db6de0..63fb64d 100644 --- a/google/cloud/batch_v1/services/batch_service/async_client.py +++ b/google/cloud/batch_v1/services/batch_service/async_client.py @@ -16,16 +16,27 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) -from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +from google.cloud.batch_v1 import gapic_version as package_version + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER @@ -33,20 +44,22 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.batch_v1.services.batch_service import pagers -from google.cloud.batch_v1.types import batch -from google.cloud.batch_v1.types import job -from google.cloud.batch_v1.types import job as gcb_job -from google.cloud.batch_v1.types import task from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 from google.protobuf import empty_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import BatchServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import BatchServiceGrpcAsyncIOTransport + +from google.cloud.batch_v1.services.batch_service import pagers +from google.cloud.batch_v1.types import batch +from google.cloud.batch_v1.types import job +from google.cloud.batch_v1.types import job as gcb_job +from google.cloud.batch_v1.types import task + from .client import BatchServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, BatchServiceTransport +from .transports.grpc_asyncio import BatchServiceGrpcAsyncIOTransport class BatchServiceAsyncClient: @@ -172,9 +185,9 @@ def transport(self) -> BatchServiceTransport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, BatchServiceTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the batch service client. 
@@ -218,13 +231,13 @@ def __init__( async def create_job( self, - request: Union[batch.CreateJobRequest, dict] = None, + request: Optional[Union[batch.CreateJobRequest, dict]] = None, *, - parent: str = None, - job: gcb_job.Job = None, - job_id: str = None, + parent: Optional[str] = None, + job: Optional[gcb_job.Job] = None, + job_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gcb_job.Job: r"""Create a Job. @@ -256,7 +269,7 @@ async def sample_create_job(): print(response) Args: - request (Union[google.cloud.batch_v1.types.CreateJobRequest, dict]): + request (Optional[Union[google.cloud.batch_v1.types.CreateJobRequest, dict]]): The request object. CreateJob Request. parent (:class:`str`): Required. The parent resource name @@ -273,11 +286,11 @@ async def sample_create_job(): should not be set. job_id (:class:`str`): ID used to uniquely identify the Job within its parent - scope. This field should contain at most 63 characters. - Only alphanumeric characters or '-' are accepted. The - '-' character cannot be the first or the last one. A - system generated ID will be used if the field is not - set. + scope. This field should contain at most 63 characters + and must start with lowercase characters. Only lowercase + characters, numbers and '-' are accepted. The '-' + character cannot be the first or the last one. A system + generated ID will be used if the field is not set. The job.name field in the request will be ignored and the created resource name of the Job will be @@ -344,11 +357,11 @@ async def sample_create_job(): async def get_job( self, - request: Union[batch.GetJobRequest, dict] = None, + request: Optional[Union[batch.GetJobRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> job.Job: r"""Get a Job specified by its resource name. @@ -380,7 +393,7 @@ async def sample_get_job(): print(response) Args: - request (Union[google.cloud.batch_v1.types.GetJobRequest, dict]): + request (Optional[Union[google.cloud.batch_v1.types.GetJobRequest, dict]]): The request object. GetJob Request. name (:class:`str`): Required. Job name. @@ -450,11 +463,11 @@ async def sample_get_job(): async def delete_job( self, - request: Union[batch.DeleteJobRequest, dict] = None, + request: Optional[Union[batch.DeleteJobRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Delete a Job. @@ -483,13 +496,13 @@ async def sample_delete_job(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) Args: - request (Union[google.cloud.batch_v1.types.DeleteJobRequest, dict]): + request (Optional[Union[google.cloud.batch_v1.types.DeleteJobRequest, dict]]): The request object. DeleteJob Request. name (:class:`str`): Job name. 
@@ -570,11 +583,11 @@ async def sample_delete_job(): async def list_jobs( self, - request: Union[batch.ListJobsRequest, dict] = None, + request: Optional[Union[batch.ListJobsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListJobsAsyncPager: r"""List all Jobs for a project within a region. @@ -606,7 +619,7 @@ async def sample_list_jobs(): print(response) Args: - request (Union[google.cloud.batch_v1.types.ListJobsRequest, dict]): + request (Optional[Union[google.cloud.batch_v1.types.ListJobsRequest, dict]]): The request object. ListJob Request. parent (:class:`str`): Parent path. @@ -689,11 +702,11 @@ async def sample_list_jobs(): async def get_task( self, - request: Union[batch.GetTaskRequest, dict] = None, + request: Optional[Union[batch.GetTaskRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> task.Task: r"""Return a single Task. @@ -725,7 +738,7 @@ async def sample_get_task(): print(response) Args: - request (Union[google.cloud.batch_v1.types.GetTaskRequest, dict]): + request (Optional[Union[google.cloud.batch_v1.types.GetTaskRequest, dict]]): The request object. Request for a single Task by name. name (:class:`str`): Required. Task name. @@ -795,11 +808,11 @@ async def sample_get_task(): async def list_tasks( self, - request: Union[batch.ListTasksRequest, dict] = None, + request: Optional[Union[batch.ListTasksRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTasksAsyncPager: r"""List Tasks associated with a job. @@ -832,7 +845,7 @@ async def sample_list_tasks(): print(response) Args: - request (Union[google.cloud.batch_v1.types.ListTasksRequest, dict]): + request (Optional[Union[google.cloud.batch_v1.types.ListTasksRequest, dict]]): The request object. ListTasks Request. parent (:class:`str`): Required. Name of a TaskGroup from which Tasks are being @@ -918,10 +931,10 @@ async def sample_list_tasks(): async def list_operations( self, - request: operations_pb2.ListOperationsRequest = None, + request: Optional[operations_pb2.ListOperationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -972,10 +985,10 @@ async def list_operations( async def get_operation( self, - request: operations_pb2.GetOperationRequest = None, + request: Optional[operations_pb2.GetOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. 
@@ -1026,10 +1039,10 @@ async def get_operation( async def delete_operation( self, - request: operations_pb2.DeleteOperationRequest = None, + request: Optional[operations_pb2.DeleteOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a long-running operation. @@ -1081,10 +1094,10 @@ async def delete_operation( async def cancel_operation( self, - request: operations_pb2.CancelOperationRequest = None, + request: Optional[operations_pb2.CancelOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -1135,10 +1148,10 @@ async def cancel_operation( async def set_iam_policy( self, - request: iam_policy_pb2.SetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -1255,10 +1268,10 @@ async def set_iam_policy( async def get_iam_policy( self, - request: iam_policy_pb2.GetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. @@ -1376,10 +1389,10 @@ async def get_iam_policy( async def test_iam_permissions( self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control @@ -1435,10 +1448,10 @@ async def test_iam_permissions( async def get_location( self, - request: locations_pb2.GetLocationRequest = None, + request: Optional[locations_pb2.GetLocationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.Location: r"""Gets information about a location. @@ -1489,10 +1502,10 @@ async def get_location( async def list_locations( self, - request: locations_pb2.ListLocationsRequest = None, + request: Optional[locations_pb2.ListLocationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. 
@@ -1548,14 +1561,9 @@ async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-batch", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("BatchServiceAsyncClient",) diff --git a/google/cloud/batch_v1/services/batch_service/client.py b/google/cloud/batch_v1/services/batch_service/client.py index b4e0b88..efcbb6d 100644 --- a/google/cloud/batch_v1/services/batch_service/client.py +++ b/google/cloud/batch_v1/services/batch_service/client.py @@ -16,19 +16,31 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +from google.cloud.batch_v1 import gapic_version as package_version + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER @@ -36,18 +48,20 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.batch_v1.services.batch_service import pagers -from google.cloud.batch_v1.types import batch -from google.cloud.batch_v1.types import job -from google.cloud.batch_v1.types import job as gcb_job -from google.cloud.batch_v1.types import task from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 from google.protobuf import empty_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import BatchServiceTransport, DEFAULT_CLIENT_INFO + +from google.cloud.batch_v1.services.batch_service import pagers +from google.cloud.batch_v1.types import batch +from google.cloud.batch_v1.types import job +from google.cloud.batch_v1.types import job as gcb_job +from google.cloud.batch_v1.types import task + +from .transports.base import DEFAULT_CLIENT_INFO, BatchServiceTransport from .transports.grpc import BatchServiceGrpcTransport from .transports.grpc_asyncio import BatchServiceGrpcAsyncIOTransport from .transports.rest import BatchServiceRestTransport @@ -68,7 +82,7 @@ class BatchServiceClientMeta(type): def get_transport_class( cls, - label: str = None, + label: Optional[str] = None, ) -> Type[BatchServiceTransport]: """Returns an appropriate transport class. 
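The `DEFAULT_CLIENT_INFO` blocks above now read the library version from the generated `gapic_version` module instead of querying `pkg_resources`. A small sketch of the equivalent construction, assuming `gapic_version.__version__` exists as the new import implies:

from google.api_core import gapic_v1
from google.cloud.batch_v1 import gapic_version as package_version

# Same construction the generated clients now perform at import time.
client_info = gapic_v1.client_info.ClientInfo(
    gapic_version=package_version.__version__
)
# The resulting user-agent string carries a "gapic/<version>" token.
print(client_info.to_user_agent())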
@@ -396,8 +410,8 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, BatchServiceTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + transport: Optional[Union[str, BatchServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the batch service client. @@ -411,10 +425,7 @@ def __init__( transport (Union[str, BatchServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - NOTE: "rest" transport functionality is currently in a - beta state (preview). We welcome your feedback via an - issue in this library's source repository. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -444,6 +455,7 @@ def __init__( client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( client_options @@ -496,13 +508,13 @@ def __init__( def create_job( self, - request: Union[batch.CreateJobRequest, dict] = None, + request: Optional[Union[batch.CreateJobRequest, dict]] = None, *, - parent: str = None, - job: gcb_job.Job = None, - job_id: str = None, + parent: Optional[str] = None, + job: Optional[gcb_job.Job] = None, + job_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gcb_job.Job: r"""Create a Job. @@ -551,11 +563,11 @@ def sample_create_job(): should not be set. job_id (str): ID used to uniquely identify the Job within its parent - scope. This field should contain at most 63 characters. - Only alphanumeric characters or '-' are accepted. The - '-' character cannot be the first or the last one. A - system generated ID will be used if the field is not - set. + scope. This field should contain at most 63 characters + and must start with lowercase characters. Only lowercase + characters, numbers and '-' are accepted. The '-' + character cannot be the first or the last one. A system + generated ID will be used if the field is not set. The job.name field in the request will be ignored and the created resource name of the Job will be @@ -622,11 +634,11 @@ def sample_create_job(): def get_job( self, - request: Union[batch.GetJobRequest, dict] = None, + request: Optional[Union[batch.GetJobRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> job.Job: r"""Get a Job specified by its resource name. 
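The reworded `job_id` documentation above tightens the accepted format: lowercase start, only lowercase letters, digits and '-', no leading or trailing '-', at most 63 characters. One possible client-side pre-check that mirrors that wording; the service-side validation remains authoritative.

import re

_JOB_ID_RE = re.compile(r"^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$")


def is_valid_job_id(job_id: str) -> bool:
    """Rough check against the documented job_id constraints."""
    return bool(_JOB_ID_RE.match(job_id))


assert is_valid_job_id("render-frames-001")
assert not is_valid_job_id("Render-Frames")   # uppercase not allowed
assert not is_valid_job_id("-leading-dash")   # '-' cannot be first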
@@ -719,11 +731,11 @@ def sample_get_job(): def delete_job( self, - request: Union[batch.DeleteJobRequest, dict] = None, + request: Optional[Union[batch.DeleteJobRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Delete a Job. @@ -839,11 +851,11 @@ def sample_delete_job(): def list_jobs( self, - request: Union[batch.ListJobsRequest, dict] = None, + request: Optional[Union[batch.ListJobsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListJobsPager: r"""List all Jobs for a project within a region. @@ -949,11 +961,11 @@ def sample_list_jobs(): def get_task( self, - request: Union[batch.GetTaskRequest, dict] = None, + request: Optional[Union[batch.GetTaskRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> task.Task: r"""Return a single Task. @@ -1046,11 +1058,11 @@ def sample_get_task(): def list_tasks( self, - request: Union[batch.ListTasksRequest, dict] = None, + request: Optional[Union[batch.ListTasksRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTasksPager: r"""List Tasks associated with a job. @@ -1173,10 +1185,10 @@ def __exit__(self, type, value, traceback): def list_operations( self, - request: operations_pb2.ListOperationsRequest = None, + request: Optional[operations_pb2.ListOperationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1227,10 +1239,10 @@ def list_operations( def get_operation( self, - request: operations_pb2.GetOperationRequest = None, + request: Optional[operations_pb2.GetOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1281,10 +1293,10 @@ def get_operation( def delete_operation( self, - request: operations_pb2.DeleteOperationRequest = None, + request: Optional[operations_pb2.DeleteOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a long-running operation. 
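The constructor hunk above also widens `client_options` to accept a plain dict, which the existing `from_dict` branch converts before the new `cast`. A small sketch, using anonymous credentials so it runs without application default credentials; the endpoint value is only illustrative:

from google.auth.credentials import AnonymousCredentials
from google.cloud import batch_v1

client = batch_v1.BatchServiceClient(
    credentials=AnonymousCredentials(),
    client_options={"api_endpoint": "batch.googleapis.com"},  # illustrative
)
# grpc remains the default transport when none is requested.
print(type(client.transport).__name__)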
@@ -1336,10 +1348,10 @@ def delete_operation( def cancel_operation( self, - request: operations_pb2.CancelOperationRequest = None, + request: Optional[operations_pb2.CancelOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -1390,10 +1402,10 @@ def cancel_operation( def set_iam_policy( self, - request: iam_policy_pb2.SetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -1510,10 +1522,10 @@ def set_iam_policy( def get_iam_policy( self, - request: iam_policy_pb2.GetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. @@ -1631,10 +1643,10 @@ def get_iam_policy( def test_iam_permissions( self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control @@ -1690,10 +1702,10 @@ def test_iam_permissions( def get_location( self, - request: locations_pb2.GetLocationRequest = None, + request: Optional[locations_pb2.GetLocationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.Location: r"""Gets information about a location. @@ -1744,10 +1756,10 @@ def get_location( def list_locations( self, - request: locations_pb2.ListLocationsRequest = None, + request: Optional[locations_pb2.ListLocationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. 
@@ -1797,14 +1809,9 @@ def list_locations( return response -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-batch", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("BatchServiceClient",) diff --git a/google/cloud/batch_v1/services/batch_service/pagers.py b/google/cloud/batch_v1/services/batch_service/pagers.py index 8b24c98..bc0f331 100644 --- a/google/cloud/batch_v1/services/batch_service/pagers.py +++ b/google/cloud/batch_v1/services/batch_service/pagers.py @@ -18,15 +18,13 @@ AsyncIterator, Awaitable, Callable, + Iterator, + Optional, Sequence, Tuple, - Optional, - Iterator, ) -from google.cloud.batch_v1.types import batch -from google.cloud.batch_v1.types import job -from google.cloud.batch_v1.types import task +from google.cloud.batch_v1.types import batch, job, task class ListJobsPager: diff --git a/google/cloud/batch_v1/services/batch_service/transports/__init__.py b/google/cloud/batch_v1/services/batch_service/transports/__init__.py index 51fcc77..cd9b864 100644 --- a/google/cloud/batch_v1/services/batch_service/transports/__init__.py +++ b/google/cloud/batch_v1/services/batch_service/transports/__init__.py @@ -19,9 +19,7 @@ from .base import BatchServiceTransport from .grpc import BatchServiceGrpcTransport from .grpc_asyncio import BatchServiceGrpcAsyncIOTransport -from .rest import BatchServiceRestTransport -from .rest import BatchServiceRestInterceptor - +from .rest import BatchServiceRestInterceptor, BatchServiceRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[BatchServiceTransport]] diff --git a/google/cloud/batch_v1/services/batch_service/transports/base.py b/google/cloud/batch_v1/services/batch_service/transports/base.py index 9e7d34f..b680987 100644 --- a/google/cloud/batch_v1/services/batch_service/transports/base.py +++ b/google/cloud/batch_v1/services/batch_service/transports/base.py @@ -15,35 +15,28 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources -import google.auth # type: ignore import google.api_core from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries -from google.api_core import operations_v1 +import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore +from google.cloud.batch_v1 import gapic_version as package_version from google.cloud.batch_v1.types import batch from google.cloud.batch_v1.types import job from google.cloud.batch_v1.types import job as gcb_job from google.cloud.batch_v1.types import task -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore -try: - DEFAULT_CLIENT_INFO = 
gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-batch", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class BatchServiceTransport(abc.ABC): @@ -57,7 +50,7 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/google/cloud/batch_v1/services/batch_service/transports/grpc.py b/google/cloud/batch_v1/services/batch_service/transports/grpc.py index 1c43bd5..e5b433e 100644 --- a/google/cloud/batch_v1/services/batch_service/transports/grpc.py +++ b/google/cloud/batch_v1/services/batch_service/transports/grpc.py @@ -13,28 +13,25 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, grpc_helpers, operations_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore - +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore from google.cloud.batch_v1.types import batch from google.cloud.batch_v1.types import job from google.cloud.batch_v1.types import job as gcb_job from google.cloud.batch_v1.types import task -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from .base import BatchServiceTransport, DEFAULT_CLIENT_INFO + +from .base import DEFAULT_CLIENT_INFO, BatchServiceTransport class BatchServiceGrpcTransport(BatchServiceTransport): @@ -58,14 +55,14 @@ def __init__( self, *, host: str = "batch.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ 
-193,8 +190,8 @@ def __init__( def create_channel( cls, host: str = "batch.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, diff --git a/google/cloud/batch_v1/services/batch_service/transports/grpc_asyncio.py b/google/cloud/batch_v1/services/batch_service/transports/grpc_asyncio.py index 991e7b1..e4feec1 100644 --- a/google/cloud/batch_v1/services/batch_service/transports/grpc_asyncio.py +++ b/google/cloud/batch_v1/services/batch_service/transports/grpc_asyncio.py @@ -13,15 +13,16 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import operations_v1 +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore - +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore @@ -29,12 +30,8 @@ from google.cloud.batch_v1.types import job from google.cloud.batch_v1.types import job as gcb_job from google.cloud.batch_v1.types import task -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from .base import BatchServiceTransport, DEFAULT_CLIENT_INFO + +from .base import DEFAULT_CLIENT_INFO, BatchServiceTransport from .grpc import BatchServiceGrpcTransport @@ -60,7 +57,7 @@ class BatchServiceGrpcAsyncIOTransport(BatchServiceTransport): def create_channel( cls, host: str = "batch.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -103,15 +100,15 @@ def __init__( self, *, host: str = "batch.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = 
DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/google/cloud/batch_v1/services/batch_service/transports/rest.py b/google/cloud/batch_v1/services/batch_service/transports/rest.py index dac22d6..d904994 100644 --- a/google/cloud/batch_v1/services/batch_service/transports/rest.py +++ b/google/cloud/batch_v1/services/batch_service/transports/rest.py @@ -14,29 +14,31 @@ # limitations under the License. # -from google.auth.transport.requests import AuthorizedSession # type: ignore +import dataclasses import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 +from google.protobuf import json_format +import grpc # type: ignore from requests import __version__ as requests_version -import dataclasses -import re -from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] @@ -44,14 +46,15 @@ OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.longrunning import operations_pb2 # type: ignore + from google.cloud.batch_v1.types import batch from google.cloud.batch_v1.types import job from google.cloud.batch_v1.types import job as gcb_job from google.cloud.batch_v1.types import task -from google.longrunning import operations_pb2 # type: ignore - -from .base import BatchServiceTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import BatchServiceTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, @@ -59,6 +62,10 @@ rest_version=requests_version, ) +# TODO (numeric enums): This file was generated with the option to +# request that the server respond with enums JSON-encoded as +# numbers. The code below does not implement that functionality yet. + class BatchServiceRestInterceptor: """Interceptor for BatchService. @@ -471,19 +478,16 @@ class BatchServiceRestTransport(BatchServiceTransport): It sends JSON representations of protocol buffers over HTTP/1.1 - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via an issue in this - library's source repository. Thank you! 
""" def __init__( self, *, host: str = "batch.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -493,39 +497,35 @@ def __init__( ) -> None: """Instantiate the transport. - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via a GitHub issue in - this library's repository. Thank you! - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -600,6 +600,7 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: credentials=self._credentials, scopes=self._scopes, http_options=http_options, + path_prefix="v1", ) self._operations_client = operations_v1.AbstractOperationsClient( @@ -628,7 +629,7 @@ def __call__( request: batch.CreateJobRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> gcb_job.Job: r"""Call the create job method over HTTP. @@ -663,7 +664,7 @@ def __call__( body = json_format.MessageToJson( transcoded_request["body"], including_default_value_fields=False, - use_integers_for_enums=False, + use_integers_for_enums=True, ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -673,11 +674,13 @@ def __call__( json_format.MessageToJson( transcoded_request["query_params"], including_default_value_fields=False, - use_integers_for_enums=False, + use_integers_for_enums=True, ) ) query_params.update(self._get_unset_required_fields(query_params)) + query_params["$alt"] = "json;enum-encoding=int" + # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" @@ -711,7 +714,7 @@ def __call__( request: batch.DeleteJobRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Call the delete job method over HTTP. @@ -751,10 +754,12 @@ def __call__( json_format.MessageToJson( transcoded_request["query_params"], including_default_value_fields=False, - use_integers_for_enums=False, + use_integers_for_enums=True, ) ) + query_params["$alt"] = "json;enum-encoding=int" + # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" @@ -795,7 +800,7 @@ def __call__( request: batch.GetJobRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> job.Job: r"""Call the get job method over HTTP. @@ -832,11 +837,13 @@ def __call__( json_format.MessageToJson( transcoded_request["query_params"], including_default_value_fields=False, - use_integers_for_enums=False, + use_integers_for_enums=True, ) ) query_params.update(self._get_unset_required_fields(query_params)) + query_params["$alt"] = "json;enum-encoding=int" + # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" @@ -879,7 +886,7 @@ def __call__( request: batch.GetTaskRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> task.Task: r"""Call the get task method over HTTP. 
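The REST hunks above switch every call to `use_integers_for_enums=True` and pin `$alt=json;enum-encoding=int`, so enum fields travel as numbers rather than names. A rough illustration of that serialization difference, using the `LogsPolicy.Destination` values defined in job.py later in this diff:

from google.protobuf import json_format

from google.cloud.batch_v1 import types

policy = types.LogsPolicy(
    destination=types.LogsPolicy.Destination.CLOUD_LOGGING
)
raw_pb = types.LogsPolicy.pb(policy)  # underlying protobuf message

print(json_format.MessageToDict(raw_pb))
# {'destination': 'CLOUD_LOGGING'}
print(json_format.MessageToDict(raw_pb, use_integers_for_enums=True))
# {'destination': 1}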
@@ -916,11 +923,13 @@ def __call__( json_format.MessageToJson( transcoded_request["query_params"], including_default_value_fields=False, - use_integers_for_enums=False, + use_integers_for_enums=True, ) ) query_params.update(self._get_unset_required_fields(query_params)) + query_params["$alt"] = "json;enum-encoding=int" + # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" @@ -953,7 +962,7 @@ def __call__( request: batch.ListJobsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> batch.ListJobsResponse: r"""Call the list jobs method over HTTP. @@ -990,10 +999,12 @@ def __call__( json_format.MessageToJson( transcoded_request["query_params"], including_default_value_fields=False, - use_integers_for_enums=False, + use_integers_for_enums=True, ) ) + query_params["$alt"] = "json;enum-encoding=int" + # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" @@ -1036,7 +1047,7 @@ def __call__( request: batch.ListTasksRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> batch.ListTasksResponse: r"""Call the list tasks method over HTTP. @@ -1073,11 +1084,13 @@ def __call__( json_format.MessageToJson( transcoded_request["query_params"], including_default_value_fields=False, - use_integers_for_enums=False, + use_integers_for_enums=True, ) ) query_params.update(self._get_unset_required_fields(query_params)) + query_params["$alt"] = "json;enum-encoding=int" + # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" @@ -1149,7 +1162,7 @@ def __call__( request: locations_pb2.GetLocationRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.Location: @@ -1216,7 +1229,7 @@ def __call__( request: locations_pb2.ListLocationsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.ListLocationsResponse: @@ -1283,7 +1296,7 @@ def __call__( request: iam_policy_pb2.GetIamPolicyRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: @@ -1358,7 +1371,7 @@ def __call__( request: iam_policy_pb2.SetIamPolicyRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: @@ -1438,7 +1451,7 @@ def __call__( request: iam_policy_pb2.TestIamPermissionsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: @@ -1520,7 +1533,7 @@ def __call__( request: operations_pb2.CancelOperationRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -1586,7 +1599,7 @@ def __call__( request: operations_pb2.DeleteOperationRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: 
Sequence[Tuple[str, str]] = (), ) -> None: @@ -1649,7 +1662,7 @@ def __call__( request: operations_pb2.GetOperationRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: @@ -1716,7 +1729,7 @@ def __call__( request: operations_pb2.ListOperationsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.ListOperationsResponse: diff --git a/google/cloud/batch_v1/types/__init__.py b/google/cloud/batch_v1/types/__init__.py index 1921f05..5db63f0 100644 --- a/google/cloud/batch_v1/types/__init__.py +++ b/google/cloud/batch_v1/types/__init__.py @@ -44,11 +44,7 @@ TaskSpec, TaskStatus, ) -from .volume import ( - GCS, - NFS, - Volume, -) +from .volume import GCS, NFS, Volume __all__ = ( "CreateJobRequest", diff --git a/google/cloud/batch_v1/types/batch.py b/google/cloud/batch_v1/types/batch.py index b1ceb1f..a31b3b8 100644 --- a/google/cloud/batch_v1/types/batch.py +++ b/google/cloud/batch_v1/types/batch.py @@ -13,12 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore from google.cloud.batch_v1.types import job as gcb_job from google.cloud.batch_v1.types import task -from google.protobuf import timestamp_pb2 # type: ignore - __protobuf__ = proto.module( package="google.cloud.batch.v1", @@ -46,10 +47,11 @@ class CreateJobRequest(proto.Message): "projects/{project}/locations/{location}". job_id (str): ID used to uniquely identify the Job within its parent - scope. This field should contain at most 63 characters. Only - alphanumeric characters or '-' are accepted. The '-' - character cannot be the first or the last one. A system - generated ID will be used if the field is not set. + scope. This field should contain at most 63 characters and + must start with lowercase characters. Only lowercase + characters, numbers and '-' are accepted. The '-' character + cannot be the first or the last one. A system generated ID + will be used if the field is not set. The job.name field in the request will be ignored and the created resource name of the Job will be @@ -76,20 +78,20 @@ class CreateJobRequest(proto.Message): (00000000-0000-0000-0000-000000000000). """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - job_id = proto.Field( + job_id: str = proto.Field( proto.STRING, number=2, ) - job = proto.Field( + job: gcb_job.Job = proto.Field( proto.MESSAGE, number=3, message=gcb_job.Job, ) - request_id = proto.Field( + request_id: str = proto.Field( proto.STRING, number=4, ) @@ -103,7 +105,7 @@ class GetJobRequest(proto.Message): Required. Job name. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -137,15 +139,15 @@ class DeleteJobRequest(proto.Message): (00000000-0000-0000-0000-000000000000). """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - reason = proto.Field( + reason: str = proto.Field( proto.STRING, number=2, ) - request_id = proto.Field( + request_id: str = proto.Field( proto.STRING, number=4, ) @@ -165,19 +167,19 @@ class ListJobsRequest(proto.Message): Page token. 
""" - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=4, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=2, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=3, ) @@ -187,11 +189,11 @@ class ListJobsResponse(proto.Message): r"""ListJob Response. Attributes: - jobs (Sequence[google.cloud.batch_v1.types.Job]): + jobs (MutableSequence[google.cloud.batch_v1.types.Job]): Jobs. next_page_token (str): Next page token. - unreachable (Sequence[str]): + unreachable (MutableSequence[str]): Locations that could not be reached. """ @@ -199,16 +201,16 @@ class ListJobsResponse(proto.Message): def raw_page(self): return self - jobs = proto.RepeatedField( + jobs: MutableSequence[gcb_job.Job] = proto.RepeatedField( proto.MESSAGE, number=1, message=gcb_job.Job, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) - unreachable = proto.RepeatedField( + unreachable: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=3, ) @@ -232,19 +234,19 @@ class ListTasksRequest(proto.Message): Page token. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=2, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=3, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=4, ) @@ -254,11 +256,11 @@ class ListTasksResponse(proto.Message): r"""ListTasks Response. Attributes: - tasks (Sequence[google.cloud.batch_v1.types.Task]): + tasks (MutableSequence[google.cloud.batch_v1.types.Task]): Tasks. next_page_token (str): Next page token. - unreachable (Sequence[str]): + unreachable (MutableSequence[str]): Locations that could not be reached. """ @@ -266,16 +268,16 @@ class ListTasksResponse(proto.Message): def raw_page(self): return self - tasks = proto.RepeatedField( + tasks: MutableSequence[task.Task] = proto.RepeatedField( proto.MESSAGE, number=1, message=task.Task, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) - unreachable = proto.RepeatedField( + unreachable: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=3, ) @@ -289,7 +291,7 @@ class GetTaskRequest(proto.Message): Required. Task name. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -325,33 +327,33 @@ class OperationMetadata(proto.Message): operation. 
""" - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp, ) - end_time = proto.Field( + end_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp, ) - target = proto.Field( + target: str = proto.Field( proto.STRING, number=3, ) - verb = proto.Field( + verb: str = proto.Field( proto.STRING, number=4, ) - status_message = proto.Field( + status_message: str = proto.Field( proto.STRING, number=5, ) - requested_cancellation = proto.Field( + requested_cancellation: bool = proto.Field( proto.BOOL, number=6, ) - api_version = proto.Field( + api_version: str = proto.Field( proto.STRING, number=7, ) diff --git a/google/cloud/batch_v1/types/job.py b/google/cloud/batch_v1/types/job.py index 2463c95..bbc4329 100644 --- a/google/cloud/batch_v1/types/job.py +++ b/google/cloud/batch_v1/types/job.py @@ -13,12 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import proto # type: ignore +from typing import MutableMapping, MutableSequence -from google.cloud.batch_v1.types import task from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore +from google.cloud.batch_v1.types import task __protobuf__ = proto.module( package="google.cloud.batch.v1", @@ -49,13 +50,13 @@ class Job(proto.Message): Priority of the Job. The valid value range is [0, 100). A job with higher priority value is more likely to run earlier if all other requirements are satisfied. - task_groups (Sequence[google.cloud.batch_v1.types.TaskGroup]): + task_groups (MutableSequence[google.cloud.batch_v1.types.TaskGroup]): Required. TaskGroups in the Job. Only one TaskGroup is supported now. allocation_policy (google.cloud.batch_v1.types.AllocationPolicy): Compute resource allocation for all TaskGroups in the Job. - labels (Mapping[str, str]): + labels (MutableMapping[str, str]): Labels for the Job. Labels could be user provided or system generated. For example, "labels": { "department": "finance", "environment": "test" } You can assign up to 64 labels. @@ -73,58 +74,58 @@ class Job(proto.Message): updated. logs_policy (google.cloud.batch_v1.types.LogsPolicy): Log preservation policy for the Job. - notifications (Sequence[google.cloud.batch_v1.types.JobNotification]): + notifications (MutableSequence[google.cloud.batch_v1.types.JobNotification]): Notification configurations. 
""" - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - uid = proto.Field( + uid: str = proto.Field( proto.STRING, number=2, ) - priority = proto.Field( + priority: int = proto.Field( proto.INT64, number=3, ) - task_groups = proto.RepeatedField( + task_groups: MutableSequence["TaskGroup"] = proto.RepeatedField( proto.MESSAGE, number=4, message="TaskGroup", ) - allocation_policy = proto.Field( + allocation_policy: "AllocationPolicy" = proto.Field( proto.MESSAGE, number=7, message="AllocationPolicy", ) - labels = proto.MapField( + labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=8, ) - status = proto.Field( + status: "JobStatus" = proto.Field( proto.MESSAGE, number=9, message="JobStatus", ) - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=11, message=timestamp_pb2.Timestamp, ) - update_time = proto.Field( + update_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=12, message=timestamp_pb2.Timestamp, ) - logs_policy = proto.Field( + logs_policy: "LogsPolicy" = proto.Field( proto.MESSAGE, number=13, message="LogsPolicy", ) - notifications = proto.RepeatedField( + notifications: MutableSequence["JobNotification"] = proto.RepeatedField( proto.MESSAGE, number=14, message="JobNotification", @@ -152,12 +153,12 @@ class Destination(proto.Enum): CLOUD_LOGGING = 1 PATH = 2 - destination = proto.Field( + destination: Destination = proto.Field( proto.ENUM, number=1, enum=Destination, ) - logs_path = proto.Field( + logs_path: str = proto.Field( proto.STRING, number=2, ) @@ -169,9 +170,9 @@ class JobStatus(proto.Message): Attributes: state (google.cloud.batch_v1.types.JobStatus.State): Job state - status_events (Sequence[google.cloud.batch_v1.types.StatusEvent]): + status_events (MutableSequence[google.cloud.batch_v1.types.StatusEvent]): Job status events - task_groups (Mapping[str, google.cloud.batch_v1.types.JobStatus.TaskGroupStatus]): + task_groups (MutableMapping[str, google.cloud.batch_v1.types.JobStatus.TaskGroupStatus]): Aggregated task status for each TaskGroup in the Job. The map key is TaskGroup ID. run_duration (google.protobuf.duration_pb2.Duration): @@ -202,16 +203,16 @@ class InstanceStatus(proto.Message): this instance type. """ - machine_type = proto.Field( + machine_type: str = proto.Field( proto.STRING, number=1, ) - provisioning_model = proto.Field( + provisioning_model: "AllocationPolicy.ProvisioningModel" = proto.Field( proto.ENUM, number=2, enum="AllocationPolicy.ProvisioningModel", ) - task_pack = proto.Field( + task_pack: int = proto.Field( proto.INT64, number=3, ) @@ -220,42 +221,42 @@ class TaskGroupStatus(proto.Message): r"""Aggregated task status for a TaskGroup. Attributes: - counts (Mapping[str, int]): + counts (MutableMapping[str, int]): Count of task in each state in the TaskGroup. The map key is task state name. - instances (Sequence[google.cloud.batch_v1.types.JobStatus.InstanceStatus]): + instances (MutableSequence[google.cloud.batch_v1.types.JobStatus.InstanceStatus]): Status of instances allocated for the TaskGroup. 
""" - counts = proto.MapField( + counts: MutableMapping[str, int] = proto.MapField( proto.STRING, proto.INT64, number=1, ) - instances = proto.RepeatedField( + instances: MutableSequence["JobStatus.InstanceStatus"] = proto.RepeatedField( proto.MESSAGE, number=2, message="JobStatus.InstanceStatus", ) - state = proto.Field( + state: State = proto.Field( proto.ENUM, number=1, enum=State, ) - status_events = proto.RepeatedField( + status_events: MutableSequence[task.StatusEvent] = proto.RepeatedField( proto.MESSAGE, number=2, message=task.StatusEvent, ) - task_groups = proto.MapField( + task_groups: MutableMapping[str, TaskGroupStatus] = proto.MapField( proto.STRING, proto.MESSAGE, number=4, message=TaskGroupStatus, ) - run_duration = proto.Field( + run_duration: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=5, message=duration_pb2.Duration, @@ -299,27 +300,27 @@ class Message(proto.Message): The new task state. """ - type_ = proto.Field( + type_: "JobNotification.Type" = proto.Field( proto.ENUM, number=1, enum="JobNotification.Type", ) - new_job_state = proto.Field( + new_job_state: "JobStatus.State" = proto.Field( proto.ENUM, number=2, enum="JobStatus.State", ) - new_task_state = proto.Field( + new_task_state: task.TaskStatus.State = proto.Field( proto.ENUM, number=3, enum=task.TaskStatus.State, ) - pubsub_topic = proto.Field( + pubsub_topic: str = proto.Field( proto.STRING, number=1, ) - message = proto.Field( + message: Message = proto.Field( proto.MESSAGE, number=2, message=Message, @@ -334,12 +335,12 @@ class AllocationPolicy(proto.Message): location (google.cloud.batch_v1.types.AllocationPolicy.LocationPolicy): Location where compute resources should be allocated for the Job. - instances (Sequence[google.cloud.batch_v1.types.AllocationPolicy.InstancePolicyOrTemplate]): + instances (MutableSequence[google.cloud.batch_v1.types.AllocationPolicy.InstancePolicyOrTemplate]): Describe instances that can be created by this AllocationPolicy. Only instances[0] is supported now. service_account (google.cloud.batch_v1.types.ServiceAccount): Service account that VMs will run as. - labels (Mapping[str, str]): + labels (MutableMapping[str, str]): Labels applied to all VM instances and other resources created by AllocationPolicy. Labels could be user provided or system generated. You can assign up to 64 labels. `Google @@ -362,7 +363,7 @@ class LocationPolicy(proto.Message): r""" Attributes: - allowed_locations (Sequence[str]): + allowed_locations (MutableSequence[str]): A list of allowed location names represented by internal URLs. Each location can be a region or a zone. Only one region or multiple zones in one region is supported now. For @@ -376,7 +377,7 @@ class LocationPolicy(proto.Message): "us-west1". An error is expected in this case. """ - allowed_locations = proto.RepeatedField( + allowed_locations: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=1, ) @@ -420,32 +421,32 @@ class Disk(proto.Message): support "SCSI" for persistent disks now. 
""" - image = proto.Field( + image: str = proto.Field( proto.STRING, number=4, oneof="data_source", ) - snapshot = proto.Field( + snapshot: str = proto.Field( proto.STRING, number=5, oneof="data_source", ) - type_ = proto.Field( + type_: str = proto.Field( proto.STRING, number=1, ) - size_gb = proto.Field( + size_gb: int = proto.Field( proto.INT64, number=2, ) - disk_interface = proto.Field( + disk_interface: str = proto.Field( proto.STRING, number=6, ) class AttachedDisk(proto.Message): - r"""A new or an existing persistent disk or a local ssd attached - to a VM instance. + r"""A new or an existing persistent disk (PD) or a local ssd + attached to a VM instance. This message has `oneof`_ fields (mutually exclusive fields). For each oneof, at most one member field can be set at the same time. @@ -469,25 +470,25 @@ class AttachedDisk(proto.Message): disk, and it should match the device_name field in volumes. """ - new_disk = proto.Field( + new_disk: "AllocationPolicy.Disk" = proto.Field( proto.MESSAGE, number=1, oneof="attached", message="AllocationPolicy.Disk", ) - existing_disk = proto.Field( + existing_disk: str = proto.Field( proto.STRING, number=2, oneof="attached", ) - device_name = proto.Field( + device_name: str = proto.Field( proto.STRING, number=3, ) class Accelerator(proto.Message): r"""Accelerator describes Compute Engine accelerators to be - attached to VMs. + attached to the VM. Attributes: type_ (str): @@ -500,15 +501,15 @@ class Accelerator(proto.Message): instead. """ - type_ = proto.Field( + type_: str = proto.Field( proto.STRING, number=1, ) - count = proto.Field( + count: int = proto.Field( proto.INT64, number=2, ) - install_gpu_drivers = proto.Field( + install_gpu_drivers: bool = proto.Field( proto.BOOL, number=3, ) @@ -526,34 +527,36 @@ class InstancePolicy(proto.Message): Not yet implemented. provisioning_model (google.cloud.batch_v1.types.AllocationPolicy.ProvisioningModel): The provisioning model. - accelerators (Sequence[google.cloud.batch_v1.types.AllocationPolicy.Accelerator]): + accelerators (MutableSequence[google.cloud.batch_v1.types.AllocationPolicy.Accelerator]): The accelerators attached to each VM - instance. Not yet implemented. - disks (Sequence[google.cloud.batch_v1.types.AllocationPolicy.AttachedDisk]): + instance. + disks (MutableSequence[google.cloud.batch_v1.types.AllocationPolicy.AttachedDisk]): Non-boot disks to be attached for each VM created by this InstancePolicy. New disks will - be deleted when the attached VM is deleted. + be deleted when the VM is deleted. """ - machine_type = proto.Field( + machine_type: str = proto.Field( proto.STRING, number=2, ) - min_cpu_platform = proto.Field( + min_cpu_platform: str = proto.Field( proto.STRING, number=3, ) - provisioning_model = proto.Field( + provisioning_model: "AllocationPolicy.ProvisioningModel" = proto.Field( proto.ENUM, number=4, enum="AllocationPolicy.ProvisioningModel", ) - accelerators = proto.RepeatedField( + accelerators: MutableSequence[ + "AllocationPolicy.Accelerator" + ] = proto.RepeatedField( proto.MESSAGE, number=5, message="AllocationPolicy.Accelerator", ) - disks = proto.RepeatedField( + disks: MutableSequence["AllocationPolicy.AttachedDisk"] = proto.RepeatedField( proto.MESSAGE, number=6, message="AllocationPolicy.AttachedDisk", @@ -587,18 +590,18 @@ class InstancePolicyOrTemplate(proto.Message): on their behalf. Default is false. 
""" - policy = proto.Field( + policy: "AllocationPolicy.InstancePolicy" = proto.Field( proto.MESSAGE, number=1, oneof="policy_template", message="AllocationPolicy.InstancePolicy", ) - instance_template = proto.Field( + instance_template: str = proto.Field( proto.STRING, number=2, oneof="policy_template", ) - install_gpu_drivers = proto.Field( + install_gpu_drivers: bool = proto.Field( proto.BOOL, number=3, ) @@ -624,15 +627,15 @@ class NetworkInterface(proto.Message): for more information. """ - network = proto.Field( + network: str = proto.Field( proto.STRING, number=1, ) - subnetwork = proto.Field( + subnetwork: str = proto.Field( proto.STRING, number=2, ) - no_external_ip_address = proto.Field( + no_external_ip_address: bool = proto.Field( proto.BOOL, number=3, ) @@ -641,37 +644,39 @@ class NetworkPolicy(proto.Message): r"""NetworkPolicy describes VM instance network configurations. Attributes: - network_interfaces (Sequence[google.cloud.batch_v1.types.AllocationPolicy.NetworkInterface]): + network_interfaces (MutableSequence[google.cloud.batch_v1.types.AllocationPolicy.NetworkInterface]): Network configurations. """ - network_interfaces = proto.RepeatedField( + network_interfaces: MutableSequence[ + "AllocationPolicy.NetworkInterface" + ] = proto.RepeatedField( proto.MESSAGE, number=1, message="AllocationPolicy.NetworkInterface", ) - location = proto.Field( + location: LocationPolicy = proto.Field( proto.MESSAGE, number=1, message=LocationPolicy, ) - instances = proto.RepeatedField( + instances: MutableSequence[InstancePolicyOrTemplate] = proto.RepeatedField( proto.MESSAGE, number=8, message=InstancePolicyOrTemplate, ) - service_account = proto.Field( + service_account: "ServiceAccount" = proto.Field( proto.MESSAGE, number=9, message="ServiceAccount", ) - labels = proto.MapField( + labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=6, ) - network = proto.Field( + network: NetworkPolicy = proto.Field( proto.MESSAGE, number=7, message=NetworkPolicy, @@ -697,7 +702,7 @@ class TaskGroup(proto.Message): parallelism (int): Max number of tasks that can run in parallel. Default to min(task_count, 1000). - task_environments (Sequence[google.cloud.batch_v1.types.Environment]): + task_environments (MutableSequence[google.cloud.batch_v1.types.Environment]): An array of environment variable mappings, which are passed to Tasks with matching indices. If task_environments is used then task_count should not be specified in the request (and @@ -728,37 +733,37 @@ class TaskGroup(proto.Message): tasks in the same TaskGroup. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - task_spec = proto.Field( + task_spec: task.TaskSpec = proto.Field( proto.MESSAGE, number=3, message=task.TaskSpec, ) - task_count = proto.Field( + task_count: int = proto.Field( proto.INT64, number=4, ) - parallelism = proto.Field( + parallelism: int = proto.Field( proto.INT64, number=5, ) - task_environments = proto.RepeatedField( + task_environments: MutableSequence[task.Environment] = proto.RepeatedField( proto.MESSAGE, number=9, message=task.Environment, ) - task_count_per_node = proto.Field( + task_count_per_node: int = proto.Field( proto.INT64, number=10, ) - require_hosts_file = proto.Field( + require_hosts_file: bool = proto.Field( proto.BOOL, number=11, ) - permissive_ssh = proto.Field( + permissive_ssh: bool = proto.Field( proto.BOOL, number=12, ) @@ -778,7 +783,7 @@ class ServiceAccount(proto.Message): here. 
""" - email = proto.Field( + email: str = proto.Field( proto.STRING, number=1, ) diff --git a/google/cloud/batch_v1/types/task.py b/google/cloud/batch_v1/types/task.py index 9a33bb9..9286f34 100644 --- a/google/cloud/batch_v1/types/task.py +++ b/google/cloud/batch_v1/types/task.py @@ -13,12 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import proto # type: ignore +from typing import MutableMapping, MutableSequence -from google.cloud.batch_v1.types import volume from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore +from google.cloud.batch_v1.types import volume __protobuf__ = proto.module( package="google.cloud.batch.v1", @@ -48,15 +49,15 @@ class ComputeResource(proto.Message): Extra boot disk size in MiB for each task. """ - cpu_milli = proto.Field( + cpu_milli: int = proto.Field( proto.INT64, number=1, ) - memory_mib = proto.Field( + memory_mib: int = proto.Field( proto.INT64, number=2, ) - boot_disk_mib = proto.Field( + boot_disk_mib: int = proto.Field( proto.INT64, number=4, ) @@ -76,20 +77,20 @@ class StatusEvent(proto.Message): Task Execution """ - type_ = proto.Field( + type_: str = proto.Field( proto.STRING, number=3, ) - description = proto.Field( + description: str = proto.Field( proto.STRING, number=1, ) - event_time = proto.Field( + event_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp, ) - task_execution = proto.Field( + task_execution: "TaskExecution" = proto.Field( proto.MESSAGE, number=4, message="TaskExecution", @@ -107,7 +108,7 @@ class TaskExecution(proto.Message): execution result, default is 0 as success. """ - exit_code = proto.Field( + exit_code: int = proto.Field( proto.INT32, number=1, ) @@ -119,7 +120,7 @@ class TaskStatus(proto.Message): Attributes: state (google.cloud.batch_v1.types.TaskStatus.State): Task state - status_events (Sequence[google.cloud.batch_v1.types.StatusEvent]): + status_events (MutableSequence[google.cloud.batch_v1.types.StatusEvent]): Detailed info about why the state is reached. """ @@ -132,12 +133,12 @@ class State(proto.Enum): FAILED = 4 SUCCEEDED = 5 - state = proto.Field( + state: State = proto.Field( proto.ENUM, number=1, enum=State, ) - status_events = proto.RepeatedField( + status_events: MutableSequence["StatusEvent"] = proto.RepeatedField( proto.MESSAGE, number=2, message="StatusEvent", @@ -204,14 +205,14 @@ class Container(proto.Message): Attributes: image_uri (str): The URI to pull the container image from. - commands (Sequence[str]): + commands (MutableSequence[str]): Overrides the ``CMD`` specified in the container. If there is an ENTRYPOINT (either in the container image or with the entrypoint field below) then commands are appended as arguments to the ENTRYPOINT. entrypoint (str): Overrides the ``ENTRYPOINT`` specified in the container. - volumes (Sequence[str]): + volumes (MutableSequence[str]): Volumes to mount (bind mount) from the host machine files or directories into the container, formatted to match docker run's --volume option, @@ -227,43 +228,43 @@ class Container(proto.Message): 'goog-internal'. username (str): Optional username for logging in to a docker registry. If - username matches `projects/*/secrets/*/versions/*` then + username matches ``projects/*/secrets/*/versions/*`` then Batch will read the username from the Secret Manager. password (str): Optional password for logging in to a docker registry. 
If - password matches `projects/*/secrets/*/versions/*` then + password matches ``projects/*/secrets/*/versions/*`` then Batch will read the password from the Secret Manager; """ - image_uri = proto.Field( + image_uri: str = proto.Field( proto.STRING, number=1, ) - commands = proto.RepeatedField( + commands: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=2, ) - entrypoint = proto.Field( + entrypoint: str = proto.Field( proto.STRING, number=3, ) - volumes = proto.RepeatedField( + volumes: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=7, ) - options = proto.Field( + options: str = proto.Field( proto.STRING, number=8, ) - block_external_network = proto.Field( + block_external_network: bool = proto.Field( proto.BOOL, number=9, ) - username = proto.Field( + username: str = proto.Field( proto.STRING, number=10, ) - password = proto.Field( + password: str = proto.Field( proto.STRING, number=11, ) @@ -289,12 +290,12 @@ class Script(proto.Message): This field is a member of `oneof`_ ``command``. """ - path = proto.Field( + path: str = proto.Field( proto.STRING, number=1, oneof="command", ) - text = proto.Field( + text: str = proto.Field( proto.STRING, number=2, oneof="command", @@ -311,47 +312,47 @@ class Barrier(proto.Message): present should be an identifier. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - container = proto.Field( + container: Container = proto.Field( proto.MESSAGE, number=1, oneof="executable", message=Container, ) - script = proto.Field( + script: Script = proto.Field( proto.MESSAGE, number=2, oneof="executable", message=Script, ) - barrier = proto.Field( + barrier: Barrier = proto.Field( proto.MESSAGE, number=6, oneof="executable", message=Barrier, ) - ignore_exit_status = proto.Field( + ignore_exit_status: bool = proto.Field( proto.BOOL, number=3, ) - background = proto.Field( + background: bool = proto.Field( proto.BOOL, number=4, ) - always_run = proto.Field( + always_run: bool = proto.Field( proto.BOOL, number=5, ) - environment = proto.Field( + environment: "Environment" = proto.Field( proto.MESSAGE, number=7, message="Environment", ) - timeout = proto.Field( + timeout: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=8, message=duration_pb2.Duration, @@ -362,7 +363,7 @@ class TaskSpec(proto.Message): r"""Spec of a task Attributes: - runnables (Sequence[google.cloud.batch_v1.types.Runnable]): + runnables (MutableSequence[google.cloud.batch_v1.types.Runnable]): The sequence of scripts or containers to run for this Task. Each Task using this TaskSpec executes its list of runnables in order. The Task succeeds if all of its runnables either @@ -384,7 +385,7 @@ class TaskSpec(proto.Message): max_retry_count (int): Maximum number of retries on failures. The default, 0, which means never retry. The valid value range is [0, 10]. - lifecycle_policies (Sequence[google.cloud.batch_v1.types.LifecyclePolicy]): + lifecycle_policies (MutableSequence[google.cloud.batch_v1.types.LifecyclePolicy]): Lifecycle management schema when any task in a task group is failed. The valid size of lifecycle policies are [0, 10]. For each lifecycle policy, when the condition is met, the @@ -395,10 +396,10 @@ class TaskSpec(proto.Message): policy, we consider it as the default policy. Default policy means if the exit code is 0, exit task. If task ends with non-zero exit code, retry the task with max_retry_count. 
- environments (Mapping[str, str]): + environments (MutableMapping[str, str]): Environment variables to set before running the Task. You can set up to 100 environments. - volumes (Sequence[google.cloud.batch_v1.types.Volume]): + volumes (MutableSequence[google.cloud.batch_v1.types.Volume]): Volumes to mount before running Tasks using this TaskSpec. environment (google.cloud.batch_v1.types.Environment): @@ -406,41 +407,41 @@ class TaskSpec(proto.Message): the Task. """ - runnables = proto.RepeatedField( + runnables: MutableSequence["Runnable"] = proto.RepeatedField( proto.MESSAGE, number=8, message="Runnable", ) - compute_resource = proto.Field( + compute_resource: "ComputeResource" = proto.Field( proto.MESSAGE, number=3, message="ComputeResource", ) - max_run_duration = proto.Field( + max_run_duration: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=4, message=duration_pb2.Duration, ) - max_retry_count = proto.Field( + max_retry_count: int = proto.Field( proto.INT32, number=5, ) - lifecycle_policies = proto.RepeatedField( + lifecycle_policies: MutableSequence["LifecyclePolicy"] = proto.RepeatedField( proto.MESSAGE, number=9, message="LifecyclePolicy", ) - environments = proto.MapField( + environments: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=6, ) - volumes = proto.RepeatedField( + volumes: MutableSequence[volume.Volume] = proto.RepeatedField( proto.MESSAGE, number=7, message=volume.Volume, ) - environment = proto.Field( + environment: "Environment" = proto.Field( proto.MESSAGE, number=10, message="Environment", @@ -470,7 +471,7 @@ class ActionCondition(proto.Message): r"""Conditions for actions to deal with task failures. Attributes: - exit_codes (Sequence[int]): + exit_codes (MutableSequence[int]): Exit codes of a task execution. If there are more than 1 exit codes, when task executes with any of the exit code in @@ -478,17 +479,17 @@ class ActionCondition(proto.Message): will be executed. """ - exit_codes = proto.RepeatedField( + exit_codes: MutableSequence[int] = proto.RepeatedField( proto.INT32, number=1, ) - action = proto.Field( + action: Action = proto.Field( proto.ENUM, number=1, enum=Action, ) - action_condition = proto.Field( + action_condition: ActionCondition = proto.Field( proto.MESSAGE, number=2, message=ActionCondition, @@ -508,11 +509,11 @@ class Task(proto.Message): Task Status. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - status = proto.Field( + status: "TaskStatus" = proto.Field( proto.MESSAGE, number=2, message="TaskStatus", @@ -524,12 +525,12 @@ class Environment(proto.Message): variables to set when executing Tasks. Attributes: - variables (Mapping[str, str]): + variables (MutableMapping[str, str]): A map of environment variable names to values. """ - variables = proto.MapField( + variables: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=1, diff --git a/google/cloud/batch_v1/types/volume.py b/google/cloud/batch_v1/types/volume.py index b6d6aa1..ac1e42c 100644 --- a/google/cloud/batch_v1/types/volume.py +++ b/google/cloud/batch_v1/types/volume.py @@ -13,8 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import proto # type: ignore +from typing import MutableMapping, MutableSequence +import proto # type: ignore __protobuf__ = proto.module( package="google.cloud.batch.v1", @@ -27,9 +28,8 @@ class Volume(proto.Message): - r"""Volume and mount parameters to be associated with a TaskSpec. 
- A TaskSpec might describe zero, one, or multiple volumes to be - mounted as part of the task. + r"""Volume describes a volume and parameters for it to be mounted + to a VM. This message has `oneof`_ fields (mutually exclusive fields). For each oneof, at most one member field can be set at the same time. @@ -40,80 +40,91 @@ class Volume(proto.Message): Attributes: nfs (google.cloud.batch_v1.types.NFS): - An NFS source for the volume (could be a - Filestore, for example). + A Network File System (NFS) volume. For + example, a Filestore file share. This field is a member of `oneof`_ ``source``. gcs (google.cloud.batch_v1.types.GCS): - A Google Cloud Storage source for the volume. + A Google Cloud Storage (GCS) volume. This field is a member of `oneof`_ ``source``. device_name (str): - Device name of an attached disk + Device name of an attached disk volume, which should align + with a device_name specified by + job.allocation_policy.instances[0].policy.disks[i].device_name + or defined by the given instance template in + job.allocation_policy.instances[0].instance_template. This field is a member of `oneof`_ ``source``. mount_path (str): - Mount path for the volume, e.g. /mnt/share - mount_options (Sequence[str]): - Mount options For Google Cloud Storage, mount options are - the global options supported by gcsfuse tool. Batch will use - them to mount the volume with the following command: - "gcsfuse [global options] bucket mountpoint". For PD, NFS, - mount options are these supported by /etc/fstab. Batch will - use Fstab to mount such volumes. - https://help.ubuntu.com/community/Fstab + The mount path for the volume, e.g. + /mnt/disks/share. + mount_options (MutableSequence[str]): + For Google Cloud Storage (GCS), mount options + are the options supported by the gcsfuse tool + (https://github.com/GoogleCloudPlatform/gcsfuse). + For existing persistent disks, mount options + provided by the mount command + (https://man7.org/linux/man-pages/man8/mount.8.html) + except writing are supported. This is due to + restrictions of multi-writer mode + (https://cloud.google.com/compute/docs/disks/sharing-disks-between-vms). + For other attached disks and Network File System + (NFS), mount options are these supported by the + mount command + (https://man7.org/linux/man-pages/man8/mount.8.html). """ - nfs = proto.Field( + nfs: "NFS" = proto.Field( proto.MESSAGE, number=1, oneof="source", message="NFS", ) - gcs = proto.Field( + gcs: "GCS" = proto.Field( proto.MESSAGE, number=3, oneof="source", message="GCS", ) - device_name = proto.Field( + device_name: str = proto.Field( proto.STRING, number=6, oneof="source", ) - mount_path = proto.Field( + mount_path: str = proto.Field( proto.STRING, number=4, ) - mount_options = proto.RepeatedField( + mount_options: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=5, ) class NFS(proto.Message): - r"""Represents an NFS server and remote path: : + r"""Represents an NFS volume. Attributes: server (str): - URI of the NFS server, e.g. an IP address. + The IP address of the NFS. remote_path (str): - Remote source path exported from NFS, e.g., - "/share". + Remote source path exported from the NFS, + e.g., "/share". """ - server = proto.Field( + server: str = proto.Field( proto.STRING, number=1, ) - remote_path = proto.Field( + remote_path: str = proto.Field( proto.STRING, number=2, ) class GCS(proto.Message): - r"""Represents a Google Cloud Storage volume source config. + r"""Represents a Google Cloud Storage volume. 
Attributes: remote_path (str): @@ -121,7 +132,7 @@ class GCS(proto.Message): bucket, e.g.: bucket_name, bucket_name/subdirectory/ """ - remote_path = proto.Field( + remote_path: str = proto.Field( proto.STRING, number=1, ) diff --git a/google/cloud/batch_v1alpha/__init__.py b/google/cloud/batch_v1alpha/__init__.py index 01875fd..c79fd3e 100644 --- a/google/cloud/batch_v1alpha/__init__.py +++ b/google/cloud/batch_v1alpha/__init__.py @@ -13,40 +13,45 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from google.cloud.batch import gapic_version as package_version -from .services.batch_service import BatchServiceClient -from .services.batch_service import BatchServiceAsyncClient +__version__ = package_version.__version__ -from .types.batch import CreateJobRequest -from .types.batch import DeleteJobRequest -from .types.batch import GetJobRequest -from .types.batch import GetTaskRequest -from .types.batch import ListJobsRequest -from .types.batch import ListJobsResponse -from .types.batch import ListTasksRequest -from .types.batch import ListTasksResponse -from .types.batch import OperationMetadata -from .types.job import AllocationPolicy -from .types.job import Job -from .types.job import JobDependency -from .types.job import JobNotification -from .types.job import JobStatus -from .types.job import LogsPolicy -from .types.job import ServiceAccount -from .types.job import TaskGroup -from .types.task import ComputeResource -from .types.task import Environment -from .types.task import LifecyclePolicy -from .types.task import Runnable -from .types.task import StatusEvent -from .types.task import Task -from .types.task import TaskExecution -from .types.task import TaskSpec -from .types.task import TaskStatus -from .types.volume import GCS -from .types.volume import NFS -from .types.volume import PD -from .types.volume import Volume + +from .services.batch_service import BatchServiceAsyncClient, BatchServiceClient +from .types.batch import ( + CreateJobRequest, + DeleteJobRequest, + GetJobRequest, + GetTaskRequest, + ListJobsRequest, + ListJobsResponse, + ListTasksRequest, + ListTasksResponse, + OperationMetadata, +) +from .types.job import ( + AllocationPolicy, + Job, + JobDependency, + JobNotification, + JobStatus, + LogsPolicy, + ServiceAccount, + TaskGroup, +) +from .types.task import ( + ComputeResource, + Environment, + LifecyclePolicy, + Runnable, + StatusEvent, + Task, + TaskExecution, + TaskSpec, + TaskStatus, +) +from .types.volume import GCS, NFS, PD, Volume __all__ = ( "BatchServiceAsyncClient", diff --git a/google/cloud/batch_v1alpha/gapic_version.py b/google/cloud/batch_v1alpha/gapic_version.py new file mode 100644 index 0000000..371eb6b --- /dev/null +++ b/google/cloud/batch_v1alpha/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "0.5.0" # {x-release-please-version} diff --git a/google/cloud/batch_v1alpha/services/batch_service/__init__.py b/google/cloud/batch_v1alpha/services/batch_service/__init__.py index 68fe726..450c3c1 100644 --- a/google/cloud/batch_v1alpha/services/batch_service/__init__.py +++ b/google/cloud/batch_v1alpha/services/batch_service/__init__.py @@ -13,8 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from .client import BatchServiceClient from .async_client import BatchServiceAsyncClient +from .client import BatchServiceClient __all__ = ( "BatchServiceClient", diff --git a/google/cloud/batch_v1alpha/services/batch_service/async_client.py b/google/cloud/batch_v1alpha/services/batch_service/async_client.py index b30d247..63c2709 100644 --- a/google/cloud/batch_v1alpha/services/batch_service/async_client.py +++ b/google/cloud/batch_v1alpha/services/batch_service/async_client.py @@ -16,16 +16,27 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) -from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +from google.cloud.batch_v1alpha import gapic_version as package_version + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER @@ -33,20 +44,22 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.batch_v1alpha.services.batch_service import pagers -from google.cloud.batch_v1alpha.types import batch -from google.cloud.batch_v1alpha.types import job -from google.cloud.batch_v1alpha.types import job as gcb_job -from google.cloud.batch_v1alpha.types import task from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 from google.protobuf import empty_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import BatchServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import BatchServiceGrpcAsyncIOTransport + +from google.cloud.batch_v1alpha.services.batch_service import pagers +from google.cloud.batch_v1alpha.types import batch +from google.cloud.batch_v1alpha.types import job +from google.cloud.batch_v1alpha.types import job as gcb_job +from google.cloud.batch_v1alpha.types import task + from .client import BatchServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, BatchServiceTransport +from .transports.grpc_asyncio import BatchServiceGrpcAsyncIOTransport class BatchServiceAsyncClient: @@ -172,9 +185,9 @@ def transport(self) -> BatchServiceTransport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, BatchServiceTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + 
client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the batch service client. @@ -218,13 +231,13 @@ def __init__( async def create_job( self, - request: Union[batch.CreateJobRequest, dict] = None, + request: Optional[Union[batch.CreateJobRequest, dict]] = None, *, - parent: str = None, - job: gcb_job.Job = None, - job_id: str = None, + parent: Optional[str] = None, + job: Optional[gcb_job.Job] = None, + job_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gcb_job.Job: r"""Create a Job. @@ -256,7 +269,7 @@ async def sample_create_job(): print(response) Args: - request (Union[google.cloud.batch_v1alpha.types.CreateJobRequest, dict]): + request (Optional[Union[google.cloud.batch_v1alpha.types.CreateJobRequest, dict]]): The request object. CreateJob Request. parent (:class:`str`): Required. The parent resource name @@ -273,11 +286,11 @@ async def sample_create_job(): should not be set. job_id (:class:`str`): ID used to uniquely identify the Job within its parent - scope. This field should contain at most 63 characters. - Only alphanumeric characters or '-' are accepted. The - '-' character cannot be the first or the last one. A - system generated ID will be used if the field is not - set. + scope. This field should contain at most 63 characters + and must start with lowercase characters. Only lowercase + characters, numbers and '-' are accepted. The '-' + character cannot be the first or the last one. A system + generated ID will be used if the field is not set. The job.name field in the request will be ignored and the created resource name of the Job will be @@ -344,11 +357,11 @@ async def sample_create_job(): async def get_job( self, - request: Union[batch.GetJobRequest, dict] = None, + request: Optional[Union[batch.GetJobRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> job.Job: r"""Get a Job specified by its resource name. @@ -380,7 +393,7 @@ async def sample_get_job(): print(response) Args: - request (Union[google.cloud.batch_v1alpha.types.GetJobRequest, dict]): + request (Optional[Union[google.cloud.batch_v1alpha.types.GetJobRequest, dict]]): The request object. GetJob Request. name (:class:`str`): Required. Job name. @@ -450,11 +463,11 @@ async def sample_get_job(): async def delete_job( self, - request: Union[batch.DeleteJobRequest, dict] = None, + request: Optional[Union[batch.DeleteJobRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Delete a Job. @@ -483,13 +496,13 @@ async def sample_delete_job(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) Args: - request (Union[google.cloud.batch_v1alpha.types.DeleteJobRequest, dict]): + request (Optional[Union[google.cloud.batch_v1alpha.types.DeleteJobRequest, dict]]): The request object. DeleteJob Request. name (:class:`str`): Job name. 
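
A quick illustrative sketch, outside the diff itself: the create_job docstring updates above tighten the job_id rules to require a lowercase start and to allow only lowercase letters, digits, and '-'. A minimal call under those rules, using placeholder project/region names and an empty Job as a stand-in payload (sync client shown for brevity):

    from google.cloud import batch_v1alpha

    client = batch_v1alpha.BatchServiceClient()
    job = client.create_job(
        # Placeholder parent resource; substitute a real project and region.
        parent="projects/my-project/locations/us-central1",
        # Must start with a lowercase letter and use only [a-z0-9-]; the
        # created resource name becomes "{parent}/jobs/{job_id}".
        job_id="example-job-1",
        # Placeholder payload; a real Job also needs task_groups and so on.
        job=batch_v1alpha.Job(),
    )
    print(job.name)
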
@@ -570,11 +583,11 @@ async def sample_delete_job(): async def list_jobs( self, - request: Union[batch.ListJobsRequest, dict] = None, + request: Optional[Union[batch.ListJobsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListJobsAsyncPager: r"""List all Jobs for a project within a region. @@ -606,7 +619,7 @@ async def sample_list_jobs(): print(response) Args: - request (Union[google.cloud.batch_v1alpha.types.ListJobsRequest, dict]): + request (Optional[Union[google.cloud.batch_v1alpha.types.ListJobsRequest, dict]]): The request object. ListJob Request. parent (:class:`str`): Parent path. @@ -689,11 +702,11 @@ async def sample_list_jobs(): async def get_task( self, - request: Union[batch.GetTaskRequest, dict] = None, + request: Optional[Union[batch.GetTaskRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> task.Task: r"""Return a single Task. @@ -725,7 +738,7 @@ async def sample_get_task(): print(response) Args: - request (Union[google.cloud.batch_v1alpha.types.GetTaskRequest, dict]): + request (Optional[Union[google.cloud.batch_v1alpha.types.GetTaskRequest, dict]]): The request object. Request for a single Task by name. name (:class:`str`): Required. Task name. @@ -795,11 +808,11 @@ async def sample_get_task(): async def list_tasks( self, - request: Union[batch.ListTasksRequest, dict] = None, + request: Optional[Union[batch.ListTasksRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTasksAsyncPager: r"""List Tasks associated with a job. @@ -832,7 +845,7 @@ async def sample_list_tasks(): print(response) Args: - request (Union[google.cloud.batch_v1alpha.types.ListTasksRequest, dict]): + request (Optional[Union[google.cloud.batch_v1alpha.types.ListTasksRequest, dict]]): The request object. ListTasks Request. parent (:class:`str`): Required. Name of a TaskGroup from which Tasks are being @@ -918,10 +931,10 @@ async def sample_list_tasks(): async def list_operations( self, - request: operations_pb2.ListOperationsRequest = None, + request: Optional[operations_pb2.ListOperationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -972,10 +985,10 @@ async def list_operations( async def get_operation( self, - request: operations_pb2.GetOperationRequest = None, + request: Optional[operations_pb2.GetOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. 
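
Stepping back from the client hunks for a moment: the field-level changes in the types-module diffs earlier (job.py, task.py, volume.py) all follow one pattern: each proto.Field, RepeatedField, and MapField gains an explicit annotation, and docstrings switch from Sequence/Mapping to MutableSequence/MutableMapping. A condensed sketch of that pattern on a made-up message, assuming proto-plus is installed (illustrative only, not part of the diff):

    from typing import MutableMapping, MutableSequence

    import proto  # type: ignore


    class ExampleMessage(proto.Message):
        # Scalar fields carry plain annotations.
        name: str = proto.Field(
            proto.STRING,
            number=1,
        )
        # Map fields are annotated as MutableMapping rather than Mapping.
        labels: MutableMapping[str, str] = proto.MapField(
            proto.STRING,
            proto.STRING,
            number=2,
        )
        # Repeated fields are annotated as MutableSequence rather than Sequence.
        tags: MutableSequence[str] = proto.RepeatedField(
            proto.STRING,
            number=3,
        )
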
@@ -1026,10 +1039,10 @@ async def get_operation( async def delete_operation( self, - request: operations_pb2.DeleteOperationRequest = None, + request: Optional[operations_pb2.DeleteOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a long-running operation. @@ -1081,10 +1094,10 @@ async def delete_operation( async def cancel_operation( self, - request: operations_pb2.CancelOperationRequest = None, + request: Optional[operations_pb2.CancelOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -1135,10 +1148,10 @@ async def cancel_operation( async def set_iam_policy( self, - request: iam_policy_pb2.SetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -1255,10 +1268,10 @@ async def set_iam_policy( async def get_iam_policy( self, - request: iam_policy_pb2.GetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. @@ -1376,10 +1389,10 @@ async def get_iam_policy( async def test_iam_permissions( self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control @@ -1435,10 +1448,10 @@ async def test_iam_permissions( async def get_location( self, - request: locations_pb2.GetLocationRequest = None, + request: Optional[locations_pb2.GetLocationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.Location: r"""Gets information about a location. @@ -1489,10 +1502,10 @@ async def get_location( async def list_locations( self, - request: locations_pb2.ListLocationsRequest = None, + request: Optional[locations_pb2.ListLocationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. 
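
One more aside on the signature changes running through these hunks: the timeout parameter moves from a bare float defaulting to None to Union[float, object] defaulting to the gapic_v1.method.DEFAULT sentinel, so a caller passes a float only to override the per-method default. A small hedged sketch with a placeholder resource name (sync client for brevity):

    from google.cloud import batch_v1alpha

    client = batch_v1alpha.BatchServiceClient()
    job = client.get_job(
        # Placeholder job resource name.
        name="projects/my-project/locations/us-central1/jobs/example-job-1",
        # An explicit float overrides the DEFAULT sentinel; omit it to keep
        # the configured default.
        timeout=30.0,
    )
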
@@ -1548,14 +1561,9 @@ async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-batch", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("BatchServiceAsyncClient",) diff --git a/google/cloud/batch_v1alpha/services/batch_service/client.py b/google/cloud/batch_v1alpha/services/batch_service/client.py index 2e875e6..0750aae 100644 --- a/google/cloud/batch_v1alpha/services/batch_service/client.py +++ b/google/cloud/batch_v1alpha/services/batch_service/client.py @@ -16,19 +16,31 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +from google.cloud.batch_v1alpha import gapic_version as package_version + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER @@ -36,18 +48,20 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.batch_v1alpha.services.batch_service import pagers -from google.cloud.batch_v1alpha.types import batch -from google.cloud.batch_v1alpha.types import job -from google.cloud.batch_v1alpha.types import job as gcb_job -from google.cloud.batch_v1alpha.types import task from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 from google.protobuf import empty_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import BatchServiceTransport, DEFAULT_CLIENT_INFO + +from google.cloud.batch_v1alpha.services.batch_service import pagers +from google.cloud.batch_v1alpha.types import batch +from google.cloud.batch_v1alpha.types import job +from google.cloud.batch_v1alpha.types import job as gcb_job +from google.cloud.batch_v1alpha.types import task + +from .transports.base import DEFAULT_CLIENT_INFO, BatchServiceTransport from .transports.grpc import BatchServiceGrpcTransport from .transports.grpc_asyncio import BatchServiceGrpcAsyncIOTransport from .transports.rest import BatchServiceRestTransport @@ -68,7 +82,7 @@ class BatchServiceClientMeta(type): def get_transport_class( cls, - label: str = None, + label: Optional[str] = None, ) -> Type[BatchServiceTransport]: """Returns an appropriate transport class. 
@@ -396,8 +410,8 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, BatchServiceTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + transport: Optional[Union[str, BatchServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the batch service client. @@ -411,10 +425,7 @@ def __init__( transport (Union[str, BatchServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - NOTE: "rest" transport functionality is currently in a - beta state (preview). We welcome your feedback via an - issue in this library's source repository. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -444,6 +455,7 @@ def __init__( client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( client_options @@ -496,13 +508,13 @@ def __init__( def create_job( self, - request: Union[batch.CreateJobRequest, dict] = None, + request: Optional[Union[batch.CreateJobRequest, dict]] = None, *, - parent: str = None, - job: gcb_job.Job = None, - job_id: str = None, + parent: Optional[str] = None, + job: Optional[gcb_job.Job] = None, + job_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gcb_job.Job: r"""Create a Job. @@ -551,11 +563,11 @@ def sample_create_job(): should not be set. job_id (str): ID used to uniquely identify the Job within its parent - scope. This field should contain at most 63 characters. - Only alphanumeric characters or '-' are accepted. The - '-' character cannot be the first or the last one. A - system generated ID will be used if the field is not - set. + scope. This field should contain at most 63 characters + and must start with lowercase characters. Only lowercase + characters, numbers and '-' are accepted. The '-' + character cannot be the first or the last one. A system + generated ID will be used if the field is not set. The job.name field in the request will be ignored and the created resource name of the Job will be @@ -622,11 +634,11 @@ def sample_create_job(): def get_job( self, - request: Union[batch.GetJobRequest, dict] = None, + request: Optional[Union[batch.GetJobRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> job.Job: r"""Get a Job specified by its resource name. 
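
This __init__ hunk also widens client_options to accept either a ClientOptions instance or a plain dict; the body already converts dicts via client_options_lib.from_dict, and the annotation now reflects that. A hedged construction sketch, with the endpoint value shown purely as an example:

    from google.cloud import batch_v1alpha

    client = batch_v1alpha.BatchServiceClient(
        # Dict form is converted internally with client_options_lib.from_dict.
        client_options={"api_endpoint": "batch.googleapis.com"},
    )
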
@@ -719,11 +731,11 @@ def sample_get_job(): def delete_job( self, - request: Union[batch.DeleteJobRequest, dict] = None, + request: Optional[Union[batch.DeleteJobRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Delete a Job. @@ -839,11 +851,11 @@ def sample_delete_job(): def list_jobs( self, - request: Union[batch.ListJobsRequest, dict] = None, + request: Optional[Union[batch.ListJobsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListJobsPager: r"""List all Jobs for a project within a region. @@ -949,11 +961,11 @@ def sample_list_jobs(): def get_task( self, - request: Union[batch.GetTaskRequest, dict] = None, + request: Optional[Union[batch.GetTaskRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> task.Task: r"""Return a single Task. @@ -1046,11 +1058,11 @@ def sample_get_task(): def list_tasks( self, - request: Union[batch.ListTasksRequest, dict] = None, + request: Optional[Union[batch.ListTasksRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTasksPager: r"""List Tasks associated with a job. @@ -1173,10 +1185,10 @@ def __exit__(self, type, value, traceback): def list_operations( self, - request: operations_pb2.ListOperationsRequest = None, + request: Optional[operations_pb2.ListOperationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1227,10 +1239,10 @@ def list_operations( def get_operation( self, - request: operations_pb2.GetOperationRequest = None, + request: Optional[operations_pb2.GetOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1281,10 +1293,10 @@ def get_operation( def delete_operation( self, - request: operations_pb2.DeleteOperationRequest = None, + request: Optional[operations_pb2.DeleteOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a long-running operation. 
@@ -1336,10 +1348,10 @@ def delete_operation( def cancel_operation( self, - request: operations_pb2.CancelOperationRequest = None, + request: Optional[operations_pb2.CancelOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -1390,10 +1402,10 @@ def cancel_operation( def set_iam_policy( self, - request: iam_policy_pb2.SetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -1510,10 +1522,10 @@ def set_iam_policy( def get_iam_policy( self, - request: iam_policy_pb2.GetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. @@ -1631,10 +1643,10 @@ def get_iam_policy( def test_iam_permissions( self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control @@ -1690,10 +1702,10 @@ def test_iam_permissions( def get_location( self, - request: locations_pb2.GetLocationRequest = None, + request: Optional[locations_pb2.GetLocationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.Location: r"""Gets information about a location. @@ -1744,10 +1756,10 @@ def get_location( def list_locations( self, - request: locations_pb2.ListLocationsRequest = None, + request: Optional[locations_pb2.ListLocationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. 
@@ -1797,14 +1809,9 @@ def list_locations( return response -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-batch", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("BatchServiceClient",) diff --git a/google/cloud/batch_v1alpha/services/batch_service/pagers.py b/google/cloud/batch_v1alpha/services/batch_service/pagers.py index 3b18f45..e43e858 100644 --- a/google/cloud/batch_v1alpha/services/batch_service/pagers.py +++ b/google/cloud/batch_v1alpha/services/batch_service/pagers.py @@ -18,15 +18,13 @@ AsyncIterator, Awaitable, Callable, + Iterator, + Optional, Sequence, Tuple, - Optional, - Iterator, ) -from google.cloud.batch_v1alpha.types import batch -from google.cloud.batch_v1alpha.types import job -from google.cloud.batch_v1alpha.types import task +from google.cloud.batch_v1alpha.types import batch, job, task class ListJobsPager: diff --git a/google/cloud/batch_v1alpha/services/batch_service/transports/__init__.py b/google/cloud/batch_v1alpha/services/batch_service/transports/__init__.py index 51fcc77..cd9b864 100644 --- a/google/cloud/batch_v1alpha/services/batch_service/transports/__init__.py +++ b/google/cloud/batch_v1alpha/services/batch_service/transports/__init__.py @@ -19,9 +19,7 @@ from .base import BatchServiceTransport from .grpc import BatchServiceGrpcTransport from .grpc_asyncio import BatchServiceGrpcAsyncIOTransport -from .rest import BatchServiceRestTransport -from .rest import BatchServiceRestInterceptor - +from .rest import BatchServiceRestInterceptor, BatchServiceRestTransport # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[BatchServiceTransport]] diff --git a/google/cloud/batch_v1alpha/services/batch_service/transports/base.py b/google/cloud/batch_v1alpha/services/batch_service/transports/base.py index 5840c64..07a98a1 100644 --- a/google/cloud/batch_v1alpha/services/batch_service/transports/base.py +++ b/google/cloud/batch_v1alpha/services/batch_service/transports/base.py @@ -15,35 +15,28 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources -import google.auth # type: ignore import google.api_core from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries -from google.api_core import operations_v1 +import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore +from google.cloud.batch_v1alpha import gapic_version as package_version from google.cloud.batch_v1alpha.types import batch from google.cloud.batch_v1alpha.types import job from google.cloud.batch_v1alpha.types import job as gcb_job from google.cloud.batch_v1alpha.types import task -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-batch", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class BatchServiceTransport(abc.ABC): @@ -57,7 +50,7 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/google/cloud/batch_v1alpha/services/batch_service/transports/grpc.py b/google/cloud/batch_v1alpha/services/batch_service/transports/grpc.py index 57e3ec8..8a83a3e 100644 --- a/google/cloud/batch_v1alpha/services/batch_service/transports/grpc.py +++ b/google/cloud/batch_v1alpha/services/batch_service/transports/grpc.py @@ -13,28 +13,25 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, grpc_helpers, operations_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore - +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore from google.cloud.batch_v1alpha.types import batch from google.cloud.batch_v1alpha.types import job from google.cloud.batch_v1alpha.types import job as gcb_job from google.cloud.batch_v1alpha.types import task -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from .base import BatchServiceTransport, DEFAULT_CLIENT_INFO + +from .base import DEFAULT_CLIENT_INFO, BatchServiceTransport class BatchServiceGrpcTransport(BatchServiceTransport): @@ -58,14 +55,14 @@ def __init__( self, *, host: str = "batch.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -193,8 +190,8 @@ def __init__( def create_channel( cls, host: str = "batch.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, diff --git a/google/cloud/batch_v1alpha/services/batch_service/transports/grpc_asyncio.py b/google/cloud/batch_v1alpha/services/batch_service/transports/grpc_asyncio.py index 8ead04f..a06f6ae 100644 --- a/google/cloud/batch_v1alpha/services/batch_service/transports/grpc_asyncio.py +++ b/google/cloud/batch_v1alpha/services/batch_service/transports/grpc_asyncio.py @@ -13,15 +13,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import operations_v1 +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore - +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore @@ -29,12 +30,8 @@ from google.cloud.batch_v1alpha.types import job from google.cloud.batch_v1alpha.types import job as gcb_job from google.cloud.batch_v1alpha.types import task -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from .base import BatchServiceTransport, DEFAULT_CLIENT_INFO + +from .base import DEFAULT_CLIENT_INFO, BatchServiceTransport from .grpc import BatchServiceGrpcTransport @@ -60,7 +57,7 @@ class BatchServiceGrpcAsyncIOTransport(BatchServiceTransport): def create_channel( cls, host: str = "batch.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -103,15 +100,15 @@ def __init__( self, *, host: str = "batch.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/google/cloud/batch_v1alpha/services/batch_service/transports/rest.py b/google/cloud/batch_v1alpha/services/batch_service/transports/rest.py index 66766ed..2771f6e 100644 --- a/google/cloud/batch_v1alpha/services/batch_service/transports/rest.py +++ b/google/cloud/batch_v1alpha/services/batch_service/transports/rest.py @@ -14,29 +14,31 @@ # limitations under the License. 
# -from google.auth.transport.requests import AuthorizedSession # type: ignore +import dataclasses import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 +from google.protobuf import json_format +import grpc # type: ignore from requests import __version__ as requests_version -import dataclasses -import re -from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] @@ -44,14 +46,15 @@ OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.longrunning import operations_pb2 # type: ignore + from google.cloud.batch_v1alpha.types import batch from google.cloud.batch_v1alpha.types import job from google.cloud.batch_v1alpha.types import job as gcb_job from google.cloud.batch_v1alpha.types import task -from google.longrunning import operations_pb2 # type: ignore - -from .base import BatchServiceTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import BatchServiceTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, @@ -59,6 +62,10 @@ rest_version=requests_version, ) +# TODO (numeric enums): This file was generated with the option to +# request that the server respond with enums JSON-encoded as +# numbers. The code below does not implement that functionality yet. + class BatchServiceRestInterceptor: """Interceptor for BatchService. @@ -471,19 +478,16 @@ class BatchServiceRestTransport(BatchServiceTransport): It sends JSON representations of protocol buffers over HTTP/1.1 - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via an issue in this - library's source repository. Thank you! 
""" def __init__( self, *, host: str = "batch.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -493,39 +497,35 @@ def __init__( ) -> None: """Instantiate the transport. - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via a GitHub issue in - this library's repository. Thank you! - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -600,6 +600,7 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: credentials=self._credentials, scopes=self._scopes, http_options=http_options, + path_prefix="v1alpha", ) self._operations_client = operations_v1.AbstractOperationsClient( @@ -628,7 +629,7 @@ def __call__( request: batch.CreateJobRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> gcb_job.Job: r"""Call the create job method over HTTP. @@ -663,7 +664,7 @@ def __call__( body = json_format.MessageToJson( transcoded_request["body"], including_default_value_fields=False, - use_integers_for_enums=False, + use_integers_for_enums=True, ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -673,11 +674,13 @@ def __call__( json_format.MessageToJson( transcoded_request["query_params"], including_default_value_fields=False, - use_integers_for_enums=False, + use_integers_for_enums=True, ) ) query_params.update(self._get_unset_required_fields(query_params)) + query_params["$alt"] = "json;enum-encoding=int" + # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" @@ -711,7 +714,7 @@ def __call__( request: batch.DeleteJobRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Call the delete job method over HTTP. @@ -751,10 +754,12 @@ def __call__( json_format.MessageToJson( transcoded_request["query_params"], including_default_value_fields=False, - use_integers_for_enums=False, + use_integers_for_enums=True, ) ) + query_params["$alt"] = "json;enum-encoding=int" + # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" @@ -795,7 +800,7 @@ def __call__( request: batch.GetJobRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> job.Job: r"""Call the get job method over HTTP. @@ -832,11 +837,13 @@ def __call__( json_format.MessageToJson( transcoded_request["query_params"], including_default_value_fields=False, - use_integers_for_enums=False, + use_integers_for_enums=True, ) ) query_params.update(self._get_unset_required_fields(query_params)) + query_params["$alt"] = "json;enum-encoding=int" + # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" @@ -879,7 +886,7 @@ def __call__( request: batch.GetTaskRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> task.Task: r"""Call the get task method over HTTP. 
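The REST transport hunks above and below switch request serialization from enum names to enum numbers: message bodies and query strings are now rendered with use_integers_for_enums=True, and the added "$alt": "json;enum-encoding=int" query parameter asks the server to encode enums in its responses the same way. A rough sketch of what that flag changes, using one of this library's own types (the exact JSON layout in the comments is approximate):

    from google.cloud.batch_v1alpha.types import job as gcb_job
    from google.protobuf import json_format

    job = gcb_job.Job(
        scheduling_policy=gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE,
    )

    # Old behaviour: enum serialized by name, e.g. {"schedulingPolicy": "AS_SOON_AS_POSSIBLE"}
    json_format.MessageToJson(gcb_job.Job.pb(job), use_integers_for_enums=False)

    # New behaviour: enum serialized as its number, e.g. {"schedulingPolicy": 1}
    json_format.MessageToJson(gcb_job.Job.pb(job), use_integers_for_enums=True)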
@@ -916,11 +923,13 @@ def __call__( json_format.MessageToJson( transcoded_request["query_params"], including_default_value_fields=False, - use_integers_for_enums=False, + use_integers_for_enums=True, ) ) query_params.update(self._get_unset_required_fields(query_params)) + query_params["$alt"] = "json;enum-encoding=int" + # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" @@ -953,7 +962,7 @@ def __call__( request: batch.ListJobsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> batch.ListJobsResponse: r"""Call the list jobs method over HTTP. @@ -990,10 +999,12 @@ def __call__( json_format.MessageToJson( transcoded_request["query_params"], including_default_value_fields=False, - use_integers_for_enums=False, + use_integers_for_enums=True, ) ) + query_params["$alt"] = "json;enum-encoding=int" + # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" @@ -1036,7 +1047,7 @@ def __call__( request: batch.ListTasksRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> batch.ListTasksResponse: r"""Call the list tasks method over HTTP. @@ -1073,11 +1084,13 @@ def __call__( json_format.MessageToJson( transcoded_request["query_params"], including_default_value_fields=False, - use_integers_for_enums=False, + use_integers_for_enums=True, ) ) query_params.update(self._get_unset_required_fields(query_params)) + query_params["$alt"] = "json;enum-encoding=int" + # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" @@ -1149,7 +1162,7 @@ def __call__( request: locations_pb2.GetLocationRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.Location: @@ -1216,7 +1229,7 @@ def __call__( request: locations_pb2.ListLocationsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.ListLocationsResponse: @@ -1283,7 +1296,7 @@ def __call__( request: iam_policy_pb2.GetIamPolicyRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: @@ -1358,7 +1371,7 @@ def __call__( request: iam_policy_pb2.SetIamPolicyRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: @@ -1438,7 +1451,7 @@ def __call__( request: iam_policy_pb2.TestIamPermissionsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: @@ -1520,7 +1533,7 @@ def __call__( request: operations_pb2.CancelOperationRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -1586,7 +1599,7 @@ def __call__( request: operations_pb2.DeleteOperationRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: 
Sequence[Tuple[str, str]] = (), ) -> None: @@ -1649,7 +1662,7 @@ def __call__( request: operations_pb2.GetOperationRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: @@ -1716,7 +1729,7 @@ def __call__( request: operations_pb2.ListOperationsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.ListOperationsResponse: diff --git a/google/cloud/batch_v1alpha/types/__init__.py b/google/cloud/batch_v1alpha/types/__init__.py index 0cef5e9..e2a5732 100644 --- a/google/cloud/batch_v1alpha/types/__init__.py +++ b/google/cloud/batch_v1alpha/types/__init__.py @@ -45,12 +45,7 @@ TaskSpec, TaskStatus, ) -from .volume import ( - GCS, - NFS, - PD, - Volume, -) +from .volume import GCS, NFS, PD, Volume __all__ = ( "CreateJobRequest", diff --git a/google/cloud/batch_v1alpha/types/batch.py b/google/cloud/batch_v1alpha/types/batch.py index 3811778..85b4494 100644 --- a/google/cloud/batch_v1alpha/types/batch.py +++ b/google/cloud/batch_v1alpha/types/batch.py @@ -13,12 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore from google.cloud.batch_v1alpha.types import job as gcb_job from google.cloud.batch_v1alpha.types import task -from google.protobuf import timestamp_pb2 # type: ignore - __protobuf__ = proto.module( package="google.cloud.batch.v1alpha", @@ -46,10 +47,11 @@ class CreateJobRequest(proto.Message): "projects/{project}/locations/{location}". job_id (str): ID used to uniquely identify the Job within its parent - scope. This field should contain at most 63 characters. Only - alphanumeric characters or '-' are accepted. The '-' - character cannot be the first or the last one. A system - generated ID will be used if the field is not set. + scope. This field should contain at most 63 characters and + must start with lowercase characters. Only lowercase + characters, numbers and '-' are accepted. The '-' character + cannot be the first or the last one. A system generated ID + will be used if the field is not set. The job.name field in the request will be ignored and the created resource name of the Job will be @@ -76,20 +78,20 @@ class CreateJobRequest(proto.Message): (00000000-0000-0000-0000-000000000000). """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - job_id = proto.Field( + job_id: str = proto.Field( proto.STRING, number=2, ) - job = proto.Field( + job: gcb_job.Job = proto.Field( proto.MESSAGE, number=3, message=gcb_job.Job, ) - request_id = proto.Field( + request_id: str = proto.Field( proto.STRING, number=4, ) @@ -103,7 +105,7 @@ class GetJobRequest(proto.Message): Required. Job name. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -137,15 +139,15 @@ class DeleteJobRequest(proto.Message): (00000000-0000-0000-0000-000000000000). """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - reason = proto.Field( + reason: str = proto.Field( proto.STRING, number=2, ) - request_id = proto.Field( + request_id: str = proto.Field( proto.STRING, number=4, ) @@ -165,19 +167,19 @@ class ListJobsRequest(proto.Message): Page token. 
""" - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=4, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=2, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=3, ) @@ -187,11 +189,11 @@ class ListJobsResponse(proto.Message): r"""ListJob Response. Attributes: - jobs (Sequence[google.cloud.batch_v1alpha.types.Job]): + jobs (MutableSequence[google.cloud.batch_v1alpha.types.Job]): Jobs. next_page_token (str): Next page token. - unreachable (Sequence[str]): + unreachable (MutableSequence[str]): Locations that could not be reached. """ @@ -199,16 +201,16 @@ class ListJobsResponse(proto.Message): def raw_page(self): return self - jobs = proto.RepeatedField( + jobs: MutableSequence[gcb_job.Job] = proto.RepeatedField( proto.MESSAGE, number=1, message=gcb_job.Job, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) - unreachable = proto.RepeatedField( + unreachable: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=3, ) @@ -232,19 +234,19 @@ class ListTasksRequest(proto.Message): Page token. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=2, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=3, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=4, ) @@ -254,11 +256,11 @@ class ListTasksResponse(proto.Message): r"""ListTasks Response. Attributes: - tasks (Sequence[google.cloud.batch_v1alpha.types.Task]): + tasks (MutableSequence[google.cloud.batch_v1alpha.types.Task]): Tasks. next_page_token (str): Next page token. - unreachable (Sequence[str]): + unreachable (MutableSequence[str]): Locations that could not be reached. """ @@ -266,16 +268,16 @@ class ListTasksResponse(proto.Message): def raw_page(self): return self - tasks = proto.RepeatedField( + tasks: MutableSequence[task.Task] = proto.RepeatedField( proto.MESSAGE, number=1, message=task.Task, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) - unreachable = proto.RepeatedField( + unreachable: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=3, ) @@ -289,7 +291,7 @@ class GetTaskRequest(proto.Message): Required. Task name. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -325,33 +327,33 @@ class OperationMetadata(proto.Message): operation. 
""" - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp, ) - end_time = proto.Field( + end_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp, ) - target = proto.Field( + target: str = proto.Field( proto.STRING, number=3, ) - verb = proto.Field( + verb: str = proto.Field( proto.STRING, number=4, ) - status_message = proto.Field( + status_message: str = proto.Field( proto.STRING, number=5, ) - requested_cancellation = proto.Field( + requested_cancellation: bool = proto.Field( proto.BOOL, number=6, ) - api_version = proto.Field( + api_version: str = proto.Field( proto.STRING, number=7, ) diff --git a/google/cloud/batch_v1alpha/types/job.py b/google/cloud/batch_v1alpha/types/job.py index d6a982a..e4969b9 100644 --- a/google/cloud/batch_v1alpha/types/job.py +++ b/google/cloud/batch_v1alpha/types/job.py @@ -13,12 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import proto # type: ignore +from typing import MutableMapping, MutableSequence -from google.cloud.batch_v1alpha.types import task from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore +from google.cloud.batch_v1alpha.types import task __protobuf__ = proto.module( package="google.cloud.batch.v1alpha", @@ -50,12 +51,12 @@ class Job(proto.Message): Priority of the Job. The valid value range is [0, 100). A job with higher priority value is more likely to run earlier if all other requirements are satisfied. - task_groups (Sequence[google.cloud.batch_v1alpha.types.TaskGroup]): + task_groups (MutableSequence[google.cloud.batch_v1alpha.types.TaskGroup]): Required. TaskGroups in the Job. Only one TaskGroup is supported now. scheduling_policy (google.cloud.batch_v1alpha.types.Job.SchedulingPolicy): Scheduling policy for TaskGroups in the job. - dependencies (Sequence[google.cloud.batch_v1alpha.types.JobDependency]): + dependencies (MutableSequence[google.cloud.batch_v1alpha.types.JobDependency]): At least one of the dependencies must be satisfied before the Job is scheduled to run. Only one JobDependency is supported now. @@ -63,7 +64,7 @@ class Job(proto.Message): allocation_policy (google.cloud.batch_v1alpha.types.AllocationPolicy): Compute resource allocation for all TaskGroups in the Job. - labels (Mapping[str, str]): + labels (MutableMapping[str, str]): Labels for the Job. Labels could be user provided or system generated. For example, "labels": { "department": "finance", "environment": "test" } You can assign up to 64 labels. @@ -83,7 +84,7 @@ class Job(proto.Message): updated. logs_policy (google.cloud.batch_v1alpha.types.LogsPolicy): Log preservation policy for the Job. - notifications (Sequence[google.cloud.batch_v1alpha.types.JobNotification]): + notifications (MutableSequence[google.cloud.batch_v1alpha.types.JobNotification]): Notification configurations. 
""" @@ -95,69 +96,69 @@ class SchedulingPolicy(proto.Enum): SCHEDULING_POLICY_UNSPECIFIED = 0 AS_SOON_AS_POSSIBLE = 1 - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - uid = proto.Field( + uid: str = proto.Field( proto.STRING, number=2, ) - priority = proto.Field( + priority: int = proto.Field( proto.INT64, number=3, ) - task_groups = proto.RepeatedField( + task_groups: MutableSequence["TaskGroup"] = proto.RepeatedField( proto.MESSAGE, number=4, message="TaskGroup", ) - scheduling_policy = proto.Field( + scheduling_policy: SchedulingPolicy = proto.Field( proto.ENUM, number=5, enum=SchedulingPolicy, ) - dependencies = proto.RepeatedField( + dependencies: MutableSequence["JobDependency"] = proto.RepeatedField( proto.MESSAGE, number=6, message="JobDependency", ) - allocation_policy = proto.Field( + allocation_policy: "AllocationPolicy" = proto.Field( proto.MESSAGE, number=7, message="AllocationPolicy", ) - labels = proto.MapField( + labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=8, ) - status = proto.Field( + status: "JobStatus" = proto.Field( proto.MESSAGE, number=9, message="JobStatus", ) - notification = proto.Field( + notification: "JobNotification" = proto.Field( proto.MESSAGE, number=10, message="JobNotification", ) - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=11, message=timestamp_pb2.Timestamp, ) - update_time = proto.Field( + update_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=12, message=timestamp_pb2.Timestamp, ) - logs_policy = proto.Field( + logs_policy: "LogsPolicy" = proto.Field( proto.MESSAGE, number=13, message="LogsPolicy", ) - notifications = proto.RepeatedField( + notifications: MutableSequence["JobNotification"] = proto.RepeatedField( proto.MESSAGE, number=14, message="JobNotification", @@ -185,12 +186,12 @@ class Destination(proto.Enum): CLOUD_LOGGING = 1 PATH = 2 - destination = proto.Field( + destination: Destination = proto.Field( proto.ENUM, number=1, enum=Destination, ) - logs_path = proto.Field( + logs_path: str = proto.Field( proto.STRING, number=2, ) @@ -202,7 +203,7 @@ class JobDependency(proto.Message): All dependent Jobs must have been submitted in the same region. Attributes: - items (Mapping[str, google.cloud.batch_v1alpha.types.JobDependency.Type]): + items (MutableMapping[str, google.cloud.batch_v1alpha.types.JobDependency.Type]): Each item maps a Job name to a Type. All items must be satisfied for the JobDependency to be satisfied (the AND @@ -219,7 +220,7 @@ class Type(proto.Enum): FAILED = 2 FINISHED = 3 - items = proto.MapField( + items: MutableMapping[str, Type] = proto.MapField( proto.STRING, proto.ENUM, number=1, @@ -233,9 +234,9 @@ class JobStatus(proto.Message): Attributes: state (google.cloud.batch_v1alpha.types.JobStatus.State): Job state - status_events (Sequence[google.cloud.batch_v1alpha.types.StatusEvent]): + status_events (MutableSequence[google.cloud.batch_v1alpha.types.StatusEvent]): Job status events - task_groups (Mapping[str, google.cloud.batch_v1alpha.types.JobStatus.TaskGroupStatus]): + task_groups (MutableMapping[str, google.cloud.batch_v1alpha.types.JobStatus.TaskGroupStatus]): Aggregated task status for each TaskGroup in the Job. The map key is TaskGroup ID. run_duration (google.protobuf.duration_pb2.Duration): @@ -266,16 +267,16 @@ class InstanceStatus(proto.Message): this instance type. 
""" - machine_type = proto.Field( + machine_type: str = proto.Field( proto.STRING, number=1, ) - provisioning_model = proto.Field( + provisioning_model: "AllocationPolicy.ProvisioningModel" = proto.Field( proto.ENUM, number=2, enum="AllocationPolicy.ProvisioningModel", ) - task_pack = proto.Field( + task_pack: int = proto.Field( proto.INT64, number=3, ) @@ -284,42 +285,42 @@ class TaskGroupStatus(proto.Message): r"""Aggregated task status for a TaskGroup. Attributes: - counts (Mapping[str, int]): + counts (MutableMapping[str, int]): Count of task in each state in the TaskGroup. The map key is task state name. - instances (Sequence[google.cloud.batch_v1alpha.types.JobStatus.InstanceStatus]): + instances (MutableSequence[google.cloud.batch_v1alpha.types.JobStatus.InstanceStatus]): Status of instances allocated for the TaskGroup. """ - counts = proto.MapField( + counts: MutableMapping[str, int] = proto.MapField( proto.STRING, proto.INT64, number=1, ) - instances = proto.RepeatedField( + instances: MutableSequence["JobStatus.InstanceStatus"] = proto.RepeatedField( proto.MESSAGE, number=2, message="JobStatus.InstanceStatus", ) - state = proto.Field( + state: State = proto.Field( proto.ENUM, number=1, enum=State, ) - status_events = proto.RepeatedField( + status_events: MutableSequence[task.StatusEvent] = proto.RepeatedField( proto.MESSAGE, number=2, message=task.StatusEvent, ) - task_groups = proto.MapField( + task_groups: MutableMapping[str, TaskGroupStatus] = proto.MapField( proto.STRING, proto.MESSAGE, number=4, message=TaskGroupStatus, ) - run_duration = proto.Field( + run_duration: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=5, message=duration_pb2.Duration, @@ -363,27 +364,27 @@ class Message(proto.Message): The new task state. """ - type_ = proto.Field( + type_: "JobNotification.Type" = proto.Field( proto.ENUM, number=1, enum="JobNotification.Type", ) - new_job_state = proto.Field( + new_job_state: "JobStatus.State" = proto.Field( proto.ENUM, number=2, enum="JobStatus.State", ) - new_task_state = proto.Field( + new_task_state: task.TaskStatus.State = proto.Field( proto.ENUM, number=3, enum=task.TaskStatus.State, ) - pubsub_topic = proto.Field( + pubsub_topic: str = proto.Field( proto.STRING, number=1, ) - message = proto.Field( + message: Message = proto.Field( proto.MESSAGE, number=2, message=Message, @@ -400,19 +401,19 @@ class AllocationPolicy(proto.Message): allocated for the Job. instance (google.cloud.batch_v1alpha.types.AllocationPolicy.InstancePolicy): Deprecated: please use instances[0].policy instead. - instances (Sequence[google.cloud.batch_v1alpha.types.AllocationPolicy.InstancePolicyOrTemplate]): + instances (MutableSequence[google.cloud.batch_v1alpha.types.AllocationPolicy.InstancePolicyOrTemplate]): Describe instances that can be created by this AllocationPolicy. Only instances[0] is supported now. - instance_templates (Sequence[str]): + instance_templates (MutableSequence[str]): Deprecated: please use instances[0].template instead. - provisioning_models (Sequence[google.cloud.batch_v1alpha.types.AllocationPolicy.ProvisioningModel]): + provisioning_models (MutableSequence[google.cloud.batch_v1alpha.types.AllocationPolicy.ProvisioningModel]): Deprecated: please use - instances[i].policy.provisioning_model instead. + instances[0].policy.provisioning_model instead. service_account_email (str): Deprecated: please use service_account instead. service_account (google.cloud.batch_v1alpha.types.ServiceAccount): Service account that VMs will run as. 
- labels (Mapping[str, str]): + labels (MutableMapping[str, str]): Labels applied to all VM instances and other resources created by AllocationPolicy. Labels could be user provided or system generated. You can assign up to 64 labels. `Google @@ -435,7 +436,7 @@ class LocationPolicy(proto.Message): r""" Attributes: - allowed_locations (Sequence[str]): + allowed_locations (MutableSequence[str]): A list of allowed location names represented by internal URLs. Each location can be a region or a zone. Only one region or multiple zones in one region is supported now. For @@ -447,16 +448,16 @@ class LocationPolicy(proto.Message): "zones/us-central1-a", "zones/us-central1-b", "zones/us-west1-a"] contains 2 regions "us-central1" and "us-west1". An error is expected in this case. - denied_locations (Sequence[str]): + denied_locations (MutableSequence[str]): A list of denied location names. Not yet implemented. """ - allowed_locations = proto.RepeatedField( + allowed_locations: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=1, ) - denied_locations = proto.RepeatedField( + denied_locations: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=2, ) @@ -500,32 +501,32 @@ class Disk(proto.Message): support "SCSI" for persistent disks now. """ - image = proto.Field( + image: str = proto.Field( proto.STRING, number=4, oneof="data_source", ) - snapshot = proto.Field( + snapshot: str = proto.Field( proto.STRING, number=5, oneof="data_source", ) - type_ = proto.Field( + type_: str = proto.Field( proto.STRING, number=1, ) - size_gb = proto.Field( + size_gb: int = proto.Field( proto.INT64, number=2, ) - disk_interface = proto.Field( + disk_interface: str = proto.Field( proto.STRING, number=6, ) class AttachedDisk(proto.Message): - r"""A new or an existing persistent disk or a local ssd attached - to a VM instance. + r"""A new or an existing persistent disk (PD) or a local ssd + attached to a VM instance. This message has `oneof`_ fields (mutually exclusive fields). For each oneof, at most one member field can be set at the same time. @@ -549,25 +550,25 @@ class AttachedDisk(proto.Message): disk, and it should match the device_name field in volumes. """ - new_disk = proto.Field( + new_disk: "AllocationPolicy.Disk" = proto.Field( proto.MESSAGE, number=1, oneof="attached", message="AllocationPolicy.Disk", ) - existing_disk = proto.Field( + existing_disk: str = proto.Field( proto.STRING, number=2, oneof="attached", ) - device_name = proto.Field( + device_name: str = proto.Field( proto.STRING, number=3, ) class Accelerator(proto.Message): r"""Accelerator describes Compute Engine accelerators to be - attached to VMs. + attached to the VM. Attributes: type_ (str): @@ -580,15 +581,15 @@ class Accelerator(proto.Message): instead. """ - type_ = proto.Field( + type_: str = proto.Field( proto.STRING, number=1, ) - count = proto.Field( + count: int = proto.Field( proto.INT64, number=2, ) - install_gpu_drivers = proto.Field( + install_gpu_drivers: bool = proto.Field( proto.BOOL, number=3, ) @@ -598,7 +599,7 @@ class InstancePolicy(proto.Message): attached to each VM created by this InstancePolicy. Attributes: - allowed_machine_types (Sequence[str]): + allowed_machine_types (MutableSequence[str]): Deprecated: please use machine_type instead. machine_type (str): The Compute Engine machine type. @@ -608,42 +609,51 @@ class InstancePolicy(proto.Message): Not yet implemented. provisioning_model (google.cloud.batch_v1alpha.types.AllocationPolicy.ProvisioningModel): The provisioning model. 
- accelerators (Sequence[google.cloud.batch_v1alpha.types.AllocationPolicy.Accelerator]): + accelerators (MutableSequence[google.cloud.batch_v1alpha.types.AllocationPolicy.Accelerator]): The accelerators attached to each VM - instance. Not yet implemented. - disks (Sequence[google.cloud.batch_v1alpha.types.AllocationPolicy.AttachedDisk]): + instance. + disks (MutableSequence[google.cloud.batch_v1alpha.types.AllocationPolicy.AttachedDisk]): Non-boot disks to be attached for each VM created by this InstancePolicy. New disks will - be deleted when the attached VM is deleted. + be deleted when the VM is deleted. + reservation (str): + If specified, VMs will be allocated only + inside the matching reservation. """ - allowed_machine_types = proto.RepeatedField( + allowed_machine_types: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=1, ) - machine_type = proto.Field( + machine_type: str = proto.Field( proto.STRING, number=2, ) - min_cpu_platform = proto.Field( + min_cpu_platform: str = proto.Field( proto.STRING, number=3, ) - provisioning_model = proto.Field( + provisioning_model: "AllocationPolicy.ProvisioningModel" = proto.Field( proto.ENUM, number=4, enum="AllocationPolicy.ProvisioningModel", ) - accelerators = proto.RepeatedField( + accelerators: MutableSequence[ + "AllocationPolicy.Accelerator" + ] = proto.RepeatedField( proto.MESSAGE, number=5, message="AllocationPolicy.Accelerator", ) - disks = proto.RepeatedField( + disks: MutableSequence["AllocationPolicy.AttachedDisk"] = proto.RepeatedField( proto.MESSAGE, number=6, message="AllocationPolicy.AttachedDisk", ) + reservation: str = proto.Field( + proto.STRING, + number=7, + ) class InstancePolicyOrTemplate(proto.Message): r"""Either an InstancePolicy or an instance template. @@ -673,18 +683,18 @@ class InstancePolicyOrTemplate(proto.Message): on their behalf. Default is false. """ - policy = proto.Field( + policy: "AllocationPolicy.InstancePolicy" = proto.Field( proto.MESSAGE, number=1, oneof="policy_template", message="AllocationPolicy.InstancePolicy", ) - instance_template = proto.Field( + instance_template: str = proto.Field( proto.STRING, number=2, oneof="policy_template", ) - install_gpu_drivers = proto.Field( + install_gpu_drivers: bool = proto.Field( proto.BOOL, number=3, ) @@ -710,15 +720,15 @@ class NetworkInterface(proto.Message): for more information. """ - network = proto.Field( + network: str = proto.Field( proto.STRING, number=1, ) - subnetwork = proto.Field( + subnetwork: str = proto.Field( proto.STRING, number=2, ) - no_external_ip_address = proto.Field( + no_external_ip_address: bool = proto.Field( proto.BOOL, number=3, ) @@ -727,55 +737,57 @@ class NetworkPolicy(proto.Message): r"""NetworkPolicy describes VM instance network configurations. Attributes: - network_interfaces (Sequence[google.cloud.batch_v1alpha.types.AllocationPolicy.NetworkInterface]): + network_interfaces (MutableSequence[google.cloud.batch_v1alpha.types.AllocationPolicy.NetworkInterface]): Network configurations. 
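Beyond the annotations, InstancePolicy gains a new reservation field (proto field number 7) that pins VM allocation to a matching reservation. A hedged sketch of how a caller might set it when building an AllocationPolicy; the machine type and reservation name are placeholders, and this diff does not say whether short names or full resource paths are expected:

    from google.cloud import batch_v1alpha

    allocation_policy = batch_v1alpha.AllocationPolicy(
        instances=[
            batch_v1alpha.AllocationPolicy.InstancePolicyOrTemplate(
                policy=batch_v1alpha.AllocationPolicy.InstancePolicy(
                    machine_type="e2-standard-4",
                    # New field: VMs will be allocated only inside the
                    # matching reservation (placeholder value).
                    reservation="example-reservation",
                ),
            ),
        ],
    )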
""" - network_interfaces = proto.RepeatedField( + network_interfaces: MutableSequence[ + "AllocationPolicy.NetworkInterface" + ] = proto.RepeatedField( proto.MESSAGE, number=1, message="AllocationPolicy.NetworkInterface", ) - location = proto.Field( + location: LocationPolicy = proto.Field( proto.MESSAGE, number=1, message=LocationPolicy, ) - instance = proto.Field( + instance: InstancePolicy = proto.Field( proto.MESSAGE, number=2, message=InstancePolicy, ) - instances = proto.RepeatedField( + instances: MutableSequence[InstancePolicyOrTemplate] = proto.RepeatedField( proto.MESSAGE, number=8, message=InstancePolicyOrTemplate, ) - instance_templates = proto.RepeatedField( + instance_templates: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=3, ) - provisioning_models = proto.RepeatedField( + provisioning_models: MutableSequence[ProvisioningModel] = proto.RepeatedField( proto.ENUM, number=4, enum=ProvisioningModel, ) - service_account_email = proto.Field( + service_account_email: str = proto.Field( proto.STRING, number=5, ) - service_account = proto.Field( + service_account: "ServiceAccount" = proto.Field( proto.MESSAGE, number=9, message="ServiceAccount", ) - labels = proto.MapField( + labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=6, ) - network = proto.Field( + network: NetworkPolicy = proto.Field( proto.MESSAGE, number=7, message=NetworkPolicy, @@ -807,14 +819,14 @@ class TaskGroup(proto.Message): Compute resource allocation for the TaskGroup. If specified, it overrides resources in Job. - labels (Mapping[str, str]): + labels (MutableMapping[str, str]): Labels for the TaskGroup. Labels could be user provided or system generated. You can assign up to 64 labels. `Google Compute Engine label restrictions `__ apply. Label names that start with "goog-" or "google-" are reserved. - task_environments (Sequence[google.cloud.batch_v1alpha.types.Environment]): + task_environments (MutableSequence[google.cloud.batch_v1alpha.types.Environment]): An array of environment variable mappings, which are passed to Tasks with matching indices. 
If task_environments is used then task_count should not be specified in the request (and @@ -852,52 +864,52 @@ class SchedulingPolicy(proto.Enum): SCHEDULING_POLICY_UNSPECIFIED = 0 AS_SOON_AS_POSSIBLE = 1 - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - task_spec = proto.Field( + task_spec: task.TaskSpec = proto.Field( proto.MESSAGE, number=3, message=task.TaskSpec, ) - task_count = proto.Field( + task_count: int = proto.Field( proto.INT64, number=4, ) - parallelism = proto.Field( + parallelism: int = proto.Field( proto.INT64, number=5, ) - scheduling_policy = proto.Field( + scheduling_policy: SchedulingPolicy = proto.Field( proto.ENUM, number=6, enum=SchedulingPolicy, ) - allocation_policy = proto.Field( + allocation_policy: "AllocationPolicy" = proto.Field( proto.MESSAGE, number=7, message="AllocationPolicy", ) - labels = proto.MapField( + labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=8, ) - task_environments = proto.RepeatedField( + task_environments: MutableSequence[task.Environment] = proto.RepeatedField( proto.MESSAGE, number=9, message=task.Environment, ) - task_count_per_node = proto.Field( + task_count_per_node: int = proto.Field( proto.INT64, number=10, ) - require_hosts_file = proto.Field( + require_hosts_file: bool = proto.Field( proto.BOOL, number=11, ) - permissive_ssh = proto.Field( + permissive_ssh: bool = proto.Field( proto.BOOL, number=12, ) @@ -915,18 +927,18 @@ class ServiceAccount(proto.Message): account has to be specified in the instance template and it has to match the email field here. - scopes (Sequence[str]): + scopes (MutableSequence[str]): List of scopes to be enabled for this service account on the VM, in addition to the cloud-platform API scope that will be added by default. """ - email = proto.Field( + email: str = proto.Field( proto.STRING, number=1, ) - scopes = proto.RepeatedField( + scopes: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=2, ) diff --git a/google/cloud/batch_v1alpha/types/task.py b/google/cloud/batch_v1alpha/types/task.py index 51d7d95..b6405df 100644 --- a/google/cloud/batch_v1alpha/types/task.py +++ b/google/cloud/batch_v1alpha/types/task.py @@ -13,12 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import proto # type: ignore +from typing import MutableMapping, MutableSequence -from google.cloud.batch_v1alpha.types import volume from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore +from google.cloud.batch_v1alpha.types import volume __protobuf__ = proto.module( package="google.cloud.batch.v1alpha", @@ -51,19 +52,19 @@ class ComputeResource(proto.Message): Extra boot disk size in MiB for each task. 
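For context, the ComputeResource message documented above is the per-task resource request referenced by TaskSpec.compute_resource. A small usage sketch with arbitrary values:

    from google.cloud import batch_v1alpha

    resources = batch_v1alpha.ComputeResource(
        cpu_milli=2000,       # 2 vCPUs per task
        memory_mib=4096,      # 4 GiB of RAM per task
        boot_disk_mib=10240,  # extra boot disk size, in MiB
    )

    task_spec = batch_v1alpha.TaskSpec(compute_resource=resources)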
""" - cpu_milli = proto.Field( + cpu_milli: int = proto.Field( proto.INT64, number=1, ) - memory_mib = proto.Field( + memory_mib: int = proto.Field( proto.INT64, number=2, ) - gpu_count = proto.Field( + gpu_count: int = proto.Field( proto.INT64, number=3, ) - boot_disk_mib = proto.Field( + boot_disk_mib: int = proto.Field( proto.INT64, number=4, ) @@ -83,20 +84,20 @@ class StatusEvent(proto.Message): Task Execution """ - type_ = proto.Field( + type_: str = proto.Field( proto.STRING, number=3, ) - description = proto.Field( + description: str = proto.Field( proto.STRING, number=1, ) - event_time = proto.Field( + event_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp, ) - task_execution = proto.Field( + task_execution: "TaskExecution" = proto.Field( proto.MESSAGE, number=4, message="TaskExecution", @@ -114,7 +115,7 @@ class TaskExecution(proto.Message): execution result, default is 0 as success. """ - exit_code = proto.Field( + exit_code: int = proto.Field( proto.INT32, number=1, ) @@ -126,7 +127,7 @@ class TaskStatus(proto.Message): Attributes: state (google.cloud.batch_v1alpha.types.TaskStatus.State): Task state - status_events (Sequence[google.cloud.batch_v1alpha.types.StatusEvent]): + status_events (MutableSequence[google.cloud.batch_v1alpha.types.StatusEvent]): Detailed info about why the state is reached. """ @@ -139,12 +140,12 @@ class State(proto.Enum): FAILED = 4 SUCCEEDED = 5 - state = proto.Field( + state: State = proto.Field( proto.ENUM, number=1, enum=State, ) - status_events = proto.RepeatedField( + status_events: MutableSequence["StatusEvent"] = proto.RepeatedField( proto.MESSAGE, number=2, message="StatusEvent", @@ -203,7 +204,7 @@ class Runnable(proto.Message): TaskGroup). timeout (google.protobuf.duration_pb2.Duration): Timeout for this Runnable. - labels (Mapping[str, str]): + labels (MutableMapping[str, str]): Labels for this Runnable. """ @@ -213,14 +214,14 @@ class Container(proto.Message): Attributes: image_uri (str): The URI to pull the container image from. - commands (Sequence[str]): + commands (MutableSequence[str]): Overrides the ``CMD`` specified in the container. If there is an ENTRYPOINT (either in the container image or with the entrypoint field below) then commands are appended as arguments to the ENTRYPOINT. entrypoint (str): Overrides the ``ENTRYPOINT`` specified in the container. - volumes (Sequence[str]): + volumes (MutableSequence[str]): Volumes to mount (bind mount) from the host machine files or directories into the container, formatted to match docker run's --volume option, @@ -236,43 +237,43 @@ class Container(proto.Message): 'goog-internal'. username (str): Optional username for logging in to a docker registry. If - username matches `projects/*/secrets/*/versions/*` then + username matches ``projects/*/secrets/*/versions/*`` then Batch will read the username from the Secret Manager. password (str): Optional password for logging in to a docker registry. 
If - password matches `projects/*/secrets/*/versions/*` then + password matches ``projects/*/secrets/*/versions/*`` then Batch will read the password from the Secret Manager; """ - image_uri = proto.Field( + image_uri: str = proto.Field( proto.STRING, number=1, ) - commands = proto.RepeatedField( + commands: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=2, ) - entrypoint = proto.Field( + entrypoint: str = proto.Field( proto.STRING, number=3, ) - volumes = proto.RepeatedField( + volumes: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=7, ) - options = proto.Field( + options: str = proto.Field( proto.STRING, number=8, ) - block_external_network = proto.Field( + block_external_network: bool = proto.Field( proto.BOOL, number=9, ) - username = proto.Field( + username: str = proto.Field( proto.STRING, number=10, ) - password = proto.Field( + password: str = proto.Field( proto.STRING, number=11, ) @@ -298,12 +299,12 @@ class Script(proto.Message): This field is a member of `oneof`_ ``command``. """ - path = proto.Field( + path: str = proto.Field( proto.STRING, number=1, oneof="command", ) - text = proto.Field( + text: str = proto.Field( proto.STRING, number=2, oneof="command", @@ -320,52 +321,52 @@ class Barrier(proto.Message): present should be an identifier. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - container = proto.Field( + container: Container = proto.Field( proto.MESSAGE, number=1, oneof="executable", message=Container, ) - script = proto.Field( + script: Script = proto.Field( proto.MESSAGE, number=2, oneof="executable", message=Script, ) - barrier = proto.Field( + barrier: Barrier = proto.Field( proto.MESSAGE, number=6, oneof="executable", message=Barrier, ) - ignore_exit_status = proto.Field( + ignore_exit_status: bool = proto.Field( proto.BOOL, number=3, ) - background = proto.Field( + background: bool = proto.Field( proto.BOOL, number=4, ) - always_run = proto.Field( + always_run: bool = proto.Field( proto.BOOL, number=5, ) - environment = proto.Field( + environment: "Environment" = proto.Field( proto.MESSAGE, number=7, message="Environment", ) - timeout = proto.Field( + timeout: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=8, message=duration_pb2.Duration, ) - labels = proto.MapField( + labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=9, @@ -376,7 +377,7 @@ class TaskSpec(proto.Message): r"""Spec of a task Attributes: - runnables (Sequence[google.cloud.batch_v1alpha.types.Runnable]): + runnables (MutableSequence[google.cloud.batch_v1alpha.types.Runnable]): The sequence of scripts or containers to run for this Task. Each Task using this TaskSpec executes its list of runnables in order. The Task succeeds if all of its runnables either @@ -398,7 +399,7 @@ class TaskSpec(proto.Message): max_retry_count (int): Maximum number of retries on failures. The default, 0, which means never retry. The valid value range is [0, 10]. - lifecycle_policies (Sequence[google.cloud.batch_v1alpha.types.LifecyclePolicy]): + lifecycle_policies (MutableSequence[google.cloud.batch_v1alpha.types.LifecyclePolicy]): Lifecycle management schema when any task in a task group is failed. The valid size of lifecycle policies are [0, 10]. For each lifecycle policy, when the condition is met, the @@ -409,10 +410,10 @@ class TaskSpec(proto.Message): policy, we consider it as the default policy. Default policy means if the exit code is 0, exit task. 
If task ends with non-zero exit code, retry the task with max_retry_count. - environments (Mapping[str, str]): + environments (MutableMapping[str, str]): Environment variables to set before running the Task. You can set up to 100 environments. - volumes (Sequence[google.cloud.batch_v1alpha.types.Volume]): + volumes (MutableSequence[google.cloud.batch_v1alpha.types.Volume]): Volumes to mount before running Tasks using this TaskSpec. environment (google.cloud.batch_v1alpha.types.Environment): @@ -420,41 +421,41 @@ class TaskSpec(proto.Message): the Task. """ - runnables = proto.RepeatedField( + runnables: MutableSequence["Runnable"] = proto.RepeatedField( proto.MESSAGE, number=8, message="Runnable", ) - compute_resource = proto.Field( + compute_resource: "ComputeResource" = proto.Field( proto.MESSAGE, number=3, message="ComputeResource", ) - max_run_duration = proto.Field( + max_run_duration: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=4, message=duration_pb2.Duration, ) - max_retry_count = proto.Field( + max_retry_count: int = proto.Field( proto.INT32, number=5, ) - lifecycle_policies = proto.RepeatedField( + lifecycle_policies: MutableSequence["LifecyclePolicy"] = proto.RepeatedField( proto.MESSAGE, number=9, message="LifecyclePolicy", ) - environments = proto.MapField( + environments: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=6, ) - volumes = proto.RepeatedField( + volumes: MutableSequence[volume.Volume] = proto.RepeatedField( proto.MESSAGE, number=7, message=volume.Volume, ) - environment = proto.Field( + environment: "Environment" = proto.Field( proto.MESSAGE, number=10, message="Environment", @@ -484,7 +485,7 @@ class ActionCondition(proto.Message): r"""Conditions for actions to deal with task failures. Attributes: - exit_codes (Sequence[int]): + exit_codes (MutableSequence[int]): Exit codes of a task execution. If there are more than 1 exit codes, when task executes with any of the exit code in @@ -492,17 +493,17 @@ class ActionCondition(proto.Message): will be executed. """ - exit_codes = proto.RepeatedField( + exit_codes: MutableSequence[int] = proto.RepeatedField( proto.INT32, number=1, ) - action = proto.Field( + action: Action = proto.Field( proto.ENUM, number=1, enum=Action, ) - action_condition = proto.Field( + action_condition: ActionCondition = proto.Field( proto.MESSAGE, number=2, message=ActionCondition, @@ -522,11 +523,11 @@ class Task(proto.Message): Task Status. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - status = proto.Field( + status: "TaskStatus" = proto.Field( proto.MESSAGE, number=2, message="TaskStatus", @@ -538,10 +539,10 @@ class Environment(proto.Message): variables to set when executing Tasks. Attributes: - variables (Mapping[str, str]): + variables (MutableMapping[str, str]): A map of environment variable names to values. - secret_variables (Mapping[str, str]): + secret_variables (MutableMapping[str, str]): A map of environment variable names to Secret Manager secret names. The VM will access the named secrets to set the value of each @@ -564,26 +565,26 @@ class KMSEnvMap(proto.Message): method. 
""" - key_name = proto.Field( + key_name: str = proto.Field( proto.STRING, number=1, ) - cipher_text = proto.Field( + cipher_text: str = proto.Field( proto.STRING, number=2, ) - variables = proto.MapField( + variables: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=1, ) - secret_variables = proto.MapField( + secret_variables: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=2, ) - encrypted_variables = proto.Field( + encrypted_variables: KMSEnvMap = proto.Field( proto.MESSAGE, number=3, message=KMSEnvMap, diff --git a/google/cloud/batch_v1alpha/types/volume.py b/google/cloud/batch_v1alpha/types/volume.py index 11ac3e8..a379dfc 100644 --- a/google/cloud/batch_v1alpha/types/volume.py +++ b/google/cloud/batch_v1alpha/types/volume.py @@ -13,8 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import proto # type: ignore +from typing import MutableMapping, MutableSequence +import proto # type: ignore __protobuf__ = proto.module( package="google.cloud.batch.v1alpha", @@ -28,9 +29,8 @@ class Volume(proto.Message): - r"""Volume and mount parameters to be associated with a TaskSpec. - A TaskSpec might describe zero, one, or multiple volumes to be - mounted as part of the task. + r"""Volume describes a volume and parameters for it to be mounted + to a VM. This message has `oneof`_ fields (mutually exclusive fields). For each oneof, at most one member field can be set at the same time. @@ -41,90 +41,101 @@ class Volume(proto.Message): Attributes: nfs (google.cloud.batch_v1alpha.types.NFS): - An NFS source for the volume (could be a - Filestore, for example). + A Network File System (NFS) volume. For + example, a Filestore file share. This field is a member of `oneof`_ ``source``. pd (google.cloud.batch_v1alpha.types.PD): - A persistent disk source for the volume. + Deprecated: please use device_name instead. This field is a member of `oneof`_ ``source``. gcs (google.cloud.batch_v1alpha.types.GCS): - A Google Cloud Storage source for the volume. + A Google Cloud Storage (GCS) volume. This field is a member of `oneof`_ ``source``. device_name (str): - Device name of an attached disk + Device name of an attached disk volume, which should align + with a device_name specified by + job.allocation_policy.instances[0].policy.disks[i].device_name + or defined by the given instance template in + job.allocation_policy.instances[0].instance_template. This field is a member of `oneof`_ ``source``. mount_path (str): - Mount path for the volume, e.g. /mnt/share - mount_options (Sequence[str]): - Mount options For Google Cloud Storage, mount options are - the global options supported by gcsfuse tool. Batch will use - them to mount the volume with the following command: - "gcsfuse [global options] bucket mountpoint". For PD, NFS, - mount options are these supported by /etc/fstab. Batch will - use Fstab to mount such volumes. - https://help.ubuntu.com/community/Fstab + The mount path for the volume, e.g. + /mnt/disks/share. + mount_options (MutableSequence[str]): + For Google Cloud Storage (GCS), mount options + are the options supported by the gcsfuse tool + (https://github.com/GoogleCloudPlatform/gcsfuse). + For existing persistent disks, mount options + provided by the mount command + (https://man7.org/linux/man-pages/man8/mount.8.html) + except writing are supported. This is due to + restrictions of multi-writer mode + (https://cloud.google.com/compute/docs/disks/sharing-disks-between-vms). 
+ For other attached disks and Network File System + (NFS), mount options are these supported by the + mount command + (https://man7.org/linux/man-pages/man8/mount.8.html). """ - nfs = proto.Field( + nfs: "NFS" = proto.Field( proto.MESSAGE, number=1, oneof="source", message="NFS", ) - pd = proto.Field( + pd: "PD" = proto.Field( proto.MESSAGE, number=2, oneof="source", message="PD", ) - gcs = proto.Field( + gcs: "GCS" = proto.Field( proto.MESSAGE, number=3, oneof="source", message="GCS", ) - device_name = proto.Field( + device_name: str = proto.Field( proto.STRING, number=6, oneof="source", ) - mount_path = proto.Field( + mount_path: str = proto.Field( proto.STRING, number=4, ) - mount_options = proto.RepeatedField( + mount_options: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=5, ) class NFS(proto.Message): - r"""Represents an NFS server and remote path: : + r"""Represents an NFS volume. Attributes: server (str): - URI of the NFS server, e.g. an IP address. + The IP address of the NFS. remote_path (str): - Remote source path exported from NFS, e.g., - "/share". + Remote source path exported from the NFS, + e.g., "/share". """ - server = proto.Field( + server: str = proto.Field( proto.STRING, number=1, ) - remote_path = proto.Field( + remote_path: str = proto.Field( proto.STRING, number=2, ) class PD(proto.Message): - r"""Represents a GCP persistent disk + r"""Deprecated: please use device_name instead. Attributes: disk (str): @@ -139,22 +150,22 @@ class PD(proto.Message): and we will mount it to the given path. """ - disk = proto.Field( + disk: str = proto.Field( proto.STRING, number=1, ) - device = proto.Field( + device: str = proto.Field( proto.STRING, number=2, ) - existing = proto.Field( + existing: bool = proto.Field( proto.BOOL, number=3, ) class GCS(proto.Message): - r"""Represents a Google Cloud Storage volume source config. + r"""Represents a Google Cloud Storage volume. Attributes: remote_path (str): @@ -162,7 +173,7 @@ class GCS(proto.Message): bucket, e.g.: bucket_name, bucket_name/subdirectory/ """ - remote_path = proto.Field( + remote_path: str = proto.Field( proto.STRING, number=1, ) diff --git a/noxfile.py b/noxfile.py index 5f898a7..d8440c0 100644 --- a/noxfile.py +++ b/noxfile.py @@ -17,6 +17,7 @@ # Generated by synthtool. DO NOT EDIT! from __future__ import absolute_import + import os import pathlib import re @@ -272,12 +273,16 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python="3.9") def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( @@ -294,13 +299,16 @@ def docs(session): ) -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python="3.9") def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") session.install( - "sphinx==4.0.1", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml" + "sphinx==4.0.1", + "alabaster", + "recommonmark", + "gcp-sphinx-docfx-yaml", ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) diff --git a/owlbot.py b/owlbot.py index ef317d8..ce738f0 100644 --- a/owlbot.py +++ b/owlbot.py @@ -12,7 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import json from pathlib import Path +import shutil import synthtool as s import synthtool.gcp as gcp @@ -22,16 +24,18 @@ # Copy the generated client from the owl-bot staging directory # ---------------------------------------------------------------------------- -default_version = "v1" +clean_up_generated_samples = True -for library in s.get_staging_dirs(default_version): - # work around issue with docstring - s.replace(library / "google/cloud/**/*.py", - """\"projects/\*/secrets/\*/versions/\*\"""", - """`projects/*/secrets/*/versions/*`""", - ) +# Load the default version defined in .repo-metadata.json. +default_version = json.load(open(".repo-metadata.json", "rt")).get( + "default_version" +) - s.move(library, excludes=["google/cloud/batch/", "setup.py"]) +for library in s.get_staging_dirs(default_version): + if clean_up_generated_samples: + shutil.rmtree("samples/generated_samples", ignore_errors=True) + clean_up_generated_samples = False + s.move([library], excludes=["**/gapic_version.py"]) s.remove_staging_dirs() # ---------------------------------------------------------------------------- @@ -39,14 +43,14 @@ # ---------------------------------------------------------------------------- templated_files = gcp.CommonTemplates().py_library( + cov_level=100, microgenerator=True, versions=gcp.common.detect_versions(path="./google", default_first=True), ) -s.move(templated_files, excludes=[".coveragerc"]) # the microgenerator has a good coveragerc file +s.move(templated_files, excludes=[".coveragerc", ".github/release-please.yml"]) python.py_samples(skip_readmes=True) - -# run blacken session for all directories which have a noxfile +# run format session for all directories which have a noxfile for noxfile in Path(".").glob("**/noxfile.py"): - s.shell.run(["nox", "-s", "blacken"], cwd=noxfile.parent, hide_output=False) + s.shell.run(["nox", "-s", "format"], cwd=noxfile.parent, hide_output=False) diff --git a/release-please-config.json b/release-please-config.json new file mode 100644 index 0000000..1fe9fd1 --- /dev/null +++ b/release-please-config.json @@ -0,0 +1,30 @@ +{ + "$schema": "https://raw.githubusercontent.com/googleapis/release-please/main/schemas/config.json", + "packages": { + ".": { + "release-type": "python", + "extra-files": [ + "google/cloud/batch_v1/gapic_version.py", + "google/cloud/batch_v1alpha/gapic_version.py", + "google/cloud/batch/gapic_version.py", + { + "type": "json", + "path": "samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json", + "jsonpath": "$.clientLibrary.version" + }, + { + "type": "json", + "path": "samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json", + "jsonpath": "$.clientLibrary.version" + } + ] + } + }, + "release-type": "python", + "plugins": [ + { + "type": "sentence-case" + } + ], + "initial-version": "0.1.0" +} diff --git a/samples/generated_samples/batch_v1_generated_batch_service_create_job_sync_2bc66fef.py b/samples/generated_samples/batch_v1_generated_batch_service_create_job_sync_2bc66fef.py deleted file mode 100644 index e3ae8a7..0000000 --- a/samples/generated_samples/batch_v1_generated_batch_service_create_job_sync_2bc66fef.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1_generated_BatchService_CreateJob_sync_2bc66fef] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1 - - -def sample_create_job(): - # Create a client - client = batch_v1.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1.CreateJobRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_job(request=request) - - # Handle the response - print(response) - -# [END batch_v1_generated_BatchService_CreateJob_sync_2bc66fef] diff --git a/samples/generated_samples/batch_v1_generated_batch_service_create_job_sync_6f38dd76.py b/samples/generated_samples/batch_v1_generated_batch_service_create_job_sync_6f38dd76.py deleted file mode 100644 index d746f6c..0000000 --- a/samples/generated_samples/batch_v1_generated_batch_service_create_job_sync_6f38dd76.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1_generated_BatchService_CreateJob_sync_6f38dd76] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1 - - -def sample_create_job(): - # Create a client - client = batch_v1.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1.CreateJobRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_job(request=request) - - # Handle the response - print(response) - -# [END batch_v1_generated_BatchService_CreateJob_sync_6f38dd76] diff --git a/samples/generated_samples/batch_v1_generated_batch_service_delete_job_async.py b/samples/generated_samples/batch_v1_generated_batch_service_delete_job_async.py index 0af8acf..9897ff7 100644 --- a/samples/generated_samples/batch_v1_generated_batch_service_delete_job_async.py +++ b/samples/generated_samples/batch_v1_generated_batch_service_delete_job_async.py @@ -47,7 +47,7 @@ async def sample_delete_job(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/batch_v1_generated_batch_service_delete_job_sync_0196009c.py b/samples/generated_samples/batch_v1_generated_batch_service_delete_job_sync_0196009c.py deleted file mode 100644 index b00884b..0000000 --- a/samples/generated_samples/batch_v1_generated_batch_service_delete_job_sync_0196009c.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1_generated_BatchService_DeleteJob_sync_0196009c] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
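Note on the batch_v1 delete_job async sample change above: alongside the duplicate sync samples being removed, the async sample changes how the long-running operation's result is read. A rough standalone sketch of that flow is below; it assumes the async client and the usual google.api_core AsyncOperation semantics (both the RPC and result() are awaitable), and the job name is illustrative only.

from google.cloud import batch_v1


async def delete_job_and_wait(name: str) -> None:
    # Sketch only; assumes BatchServiceAsyncClient and AsyncOperation behave
    # as in other generated Google Cloud clients.
    client = batch_v1.BatchServiceAsyncClient()
    request = batch_v1.DeleteJobRequest(name=name)

    # Awaiting the RPC yields the long-running operation...
    operation = await client.delete_job(request=request)
    print("Waiting for operation to complete...")

    # ...and awaiting result() waits for the deletion to finish.
    response = await operation.result()
    print(response)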
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1 - - -def sample_delete_job(): - # Create a client - client = batch_v1.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1.DeleteJobRequest( - ) - - # Make the request - operation = client.delete_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END batch_v1_generated_BatchService_DeleteJob_sync_0196009c] diff --git a/samples/generated_samples/batch_v1_generated_batch_service_delete_job_sync_91684552.py b/samples/generated_samples/batch_v1_generated_batch_service_delete_job_sync_91684552.py deleted file mode 100644 index 664bb1f..0000000 --- a/samples/generated_samples/batch_v1_generated_batch_service_delete_job_sync_91684552.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1_generated_BatchService_DeleteJob_sync_91684552] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1 - - -def sample_delete_job(): - # Create a client - client = batch_v1.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1.DeleteJobRequest( - ) - - # Make the request - operation = client.delete_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END batch_v1_generated_BatchService_DeleteJob_sync_91684552] diff --git a/samples/generated_samples/batch_v1_generated_batch_service_get_job_sync_97ad7e29.py b/samples/generated_samples/batch_v1_generated_batch_service_get_job_sync_97ad7e29.py deleted file mode 100644 index 7f399fb..0000000 --- a/samples/generated_samples/batch_v1_generated_batch_service_get_job_sync_97ad7e29.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1_generated_BatchService_GetJob_sync_97ad7e29] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1 - - -def sample_get_job(): - # Create a client - client = batch_v1.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1.GetJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_job(request=request) - - # Handle the response - print(response) - -# [END batch_v1_generated_BatchService_GetJob_sync_97ad7e29] diff --git a/samples/generated_samples/batch_v1_generated_batch_service_get_job_sync_b06cc31b.py b/samples/generated_samples/batch_v1_generated_batch_service_get_job_sync_b06cc31b.py deleted file mode 100644 index b4d2f94..0000000 --- a/samples/generated_samples/batch_v1_generated_batch_service_get_job_sync_b06cc31b.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1_generated_BatchService_GetJob_sync_b06cc31b] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1 - - -def sample_get_job(): - # Create a client - client = batch_v1.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1.GetJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_job(request=request) - - # Handle the response - print(response) - -# [END batch_v1_generated_BatchService_GetJob_sync_b06cc31b] diff --git a/samples/generated_samples/batch_v1_generated_batch_service_get_task_sync_bbff024f.py b/samples/generated_samples/batch_v1_generated_batch_service_get_task_sync_bbff024f.py deleted file mode 100644 index 9fd3103..0000000 --- a/samples/generated_samples/batch_v1_generated_batch_service_get_task_sync_bbff024f.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTask -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1_generated_BatchService_GetTask_sync_bbff024f] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1 - - -def sample_get_task(): - # Create a client - client = batch_v1.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1.GetTaskRequest( - name="name_value", - ) - - # Make the request - response = client.get_task(request=request) - - # Handle the response - print(response) - -# [END batch_v1_generated_BatchService_GetTask_sync_bbff024f] diff --git a/samples/generated_samples/batch_v1_generated_batch_service_get_task_sync_fa6ae8a7.py b/samples/generated_samples/batch_v1_generated_batch_service_get_task_sync_fa6ae8a7.py deleted file mode 100644 index 74f5d09..0000000 --- a/samples/generated_samples/batch_v1_generated_batch_service_get_task_sync_fa6ae8a7.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTask -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1_generated_BatchService_GetTask_sync_fa6ae8a7] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1 - - -def sample_get_task(): - # Create a client - client = batch_v1.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1.GetTaskRequest( - name="name_value", - ) - - # Make the request - response = client.get_task(request=request) - - # Handle the response - print(response) - -# [END batch_v1_generated_BatchService_GetTask_sync_fa6ae8a7] diff --git a/samples/generated_samples/batch_v1_generated_batch_service_list_jobs_sync_2430eb9c.py b/samples/generated_samples/batch_v1_generated_batch_service_list_jobs_sync_2430eb9c.py deleted file mode 100644 index 4c5643c..0000000 --- a/samples/generated_samples/batch_v1_generated_batch_service_list_jobs_sync_2430eb9c.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1_generated_BatchService_ListJobs_sync_2430eb9c] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1 - - -def sample_list_jobs(): - # Create a client - client = batch_v1.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1.ListJobsRequest( - ) - - # Make the request - page_result = client.list_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END batch_v1_generated_BatchService_ListJobs_sync_2430eb9c] diff --git a/samples/generated_samples/batch_v1_generated_batch_service_list_jobs_sync_d2310594.py b/samples/generated_samples/batch_v1_generated_batch_service_list_jobs_sync_d2310594.py deleted file mode 100644 index 5fffe77..0000000 --- a/samples/generated_samples/batch_v1_generated_batch_service_list_jobs_sync_d2310594.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1_generated_BatchService_ListJobs_sync_d2310594] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1 - - -def sample_list_jobs(): - # Create a client - client = batch_v1.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1.ListJobsRequest( - ) - - # Make the request - page_result = client.list_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END batch_v1_generated_BatchService_ListJobs_sync_d2310594] diff --git a/samples/generated_samples/batch_v1_generated_batch_service_list_tasks_sync_9b3f04d1.py b/samples/generated_samples/batch_v1_generated_batch_service_list_tasks_sync_9b3f04d1.py deleted file mode 100644 index b8b50f2..0000000 --- a/samples/generated_samples/batch_v1_generated_batch_service_list_tasks_sync_9b3f04d1.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTasks -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1_generated_BatchService_ListTasks_sync_9b3f04d1] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1 - - -def sample_list_tasks(): - # Create a client - client = batch_v1.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1.ListTasksRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tasks(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END batch_v1_generated_BatchService_ListTasks_sync_9b3f04d1] diff --git a/samples/generated_samples/batch_v1_generated_batch_service_list_tasks_sync_adf409cc.py b/samples/generated_samples/batch_v1_generated_batch_service_list_tasks_sync_adf409cc.py deleted file mode 100644 index 3e5bad8..0000000 --- a/samples/generated_samples/batch_v1_generated_batch_service_list_tasks_sync_adf409cc.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTasks -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1_generated_BatchService_ListTasks_sync_adf409cc] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1 - - -def sample_list_tasks(): - # Create a client - client = batch_v1.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1.ListTasksRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tasks(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END batch_v1_generated_BatchService_ListTasks_sync_adf409cc] diff --git a/samples/generated_samples/batch_v1alpha_generated_batch_service_create_job_sync_eb4fa717.py b/samples/generated_samples/batch_v1alpha_generated_batch_service_create_job_sync_eb4fa717.py deleted file mode 100644 index 90801c0..0000000 --- a/samples/generated_samples/batch_v1alpha_generated_batch_service_create_job_sync_eb4fa717.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_CreateJob_sync_eb4fa717] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -def sample_create_job(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.CreateJobRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_job(request=request) - - # Handle the response - print(response) - -# [END batch_v1alpha_generated_BatchService_CreateJob_sync_eb4fa717] diff --git a/samples/generated_samples/batch_v1alpha_generated_batch_service_create_job_sync_ee80cfe8.py b/samples/generated_samples/batch_v1alpha_generated_batch_service_create_job_sync_ee80cfe8.py deleted file mode 100644 index 1c6700a..0000000 --- a/samples/generated_samples/batch_v1alpha_generated_batch_service_create_job_sync_ee80cfe8.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_CreateJob_sync_ee80cfe8] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -def sample_create_job(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.CreateJobRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_job(request=request) - - # Handle the response - print(response) - -# [END batch_v1alpha_generated_BatchService_CreateJob_sync_ee80cfe8] diff --git a/samples/generated_samples/batch_v1alpha_generated_batch_service_delete_job_async.py b/samples/generated_samples/batch_v1alpha_generated_batch_service_delete_job_async.py index 03efd1c..ec76860 100644 --- a/samples/generated_samples/batch_v1alpha_generated_batch_service_delete_job_async.py +++ b/samples/generated_samples/batch_v1alpha_generated_batch_service_delete_job_async.py @@ -47,7 +47,7 @@ async def sample_delete_job(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/batch_v1alpha_generated_batch_service_delete_job_sync_18fb9154.py b/samples/generated_samples/batch_v1alpha_generated_batch_service_delete_job_sync_18fb9154.py deleted file mode 100644 index 52fb6a7..0000000 --- a/samples/generated_samples/batch_v1alpha_generated_batch_service_delete_job_sync_18fb9154.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_DeleteJob_sync_18fb9154] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -def sample_delete_job(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.DeleteJobRequest( - ) - - # Make the request - operation = client.delete_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END batch_v1alpha_generated_BatchService_DeleteJob_sync_18fb9154] diff --git a/samples/generated_samples/batch_v1alpha_generated_batch_service_delete_job_sync_affefd11.py b/samples/generated_samples/batch_v1alpha_generated_batch_service_delete_job_sync_affefd11.py deleted file mode 100644 index 0b33e67..0000000 --- a/samples/generated_samples/batch_v1alpha_generated_batch_service_delete_job_sync_affefd11.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_DeleteJob_sync_affefd11] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -def sample_delete_job(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.DeleteJobRequest( - ) - - # Make the request - operation = client.delete_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END batch_v1alpha_generated_BatchService_DeleteJob_sync_affefd11] diff --git a/samples/generated_samples/batch_v1alpha_generated_batch_service_get_job_sync_1014e8c7.py b/samples/generated_samples/batch_v1alpha_generated_batch_service_get_job_sync_1014e8c7.py deleted file mode 100644 index 0a6aa64..0000000 --- a/samples/generated_samples/batch_v1alpha_generated_batch_service_get_job_sync_1014e8c7.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_GetJob_sync_1014e8c7] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -def sample_get_job(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.GetJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_job(request=request) - - # Handle the response - print(response) - -# [END batch_v1alpha_generated_BatchService_GetJob_sync_1014e8c7] diff --git a/samples/generated_samples/batch_v1alpha_generated_batch_service_get_job_sync_17387e0a.py b/samples/generated_samples/batch_v1alpha_generated_batch_service_get_job_sync_17387e0a.py deleted file mode 100644 index 0eb3247..0000000 --- a/samples/generated_samples/batch_v1alpha_generated_batch_service_get_job_sync_17387e0a.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_GetJob_sync_17387e0a] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -def sample_get_job(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.GetJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_job(request=request) - - # Handle the response - print(response) - -# [END batch_v1alpha_generated_BatchService_GetJob_sync_17387e0a] diff --git a/samples/generated_samples/batch_v1alpha_generated_batch_service_get_task_sync_29b69ea9.py b/samples/generated_samples/batch_v1alpha_generated_batch_service_get_task_sync_29b69ea9.py deleted file mode 100644 index a969578..0000000 --- a/samples/generated_samples/batch_v1alpha_generated_batch_service_get_task_sync_29b69ea9.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTask -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_GetTask_sync_29b69ea9] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -def sample_get_task(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.GetTaskRequest( - name="name_value", - ) - - # Make the request - response = client.get_task(request=request) - - # Handle the response - print(response) - -# [END batch_v1alpha_generated_BatchService_GetTask_sync_29b69ea9] diff --git a/samples/generated_samples/batch_v1alpha_generated_batch_service_get_task_sync_657a0e4c.py b/samples/generated_samples/batch_v1alpha_generated_batch_service_get_task_sync_657a0e4c.py deleted file mode 100644 index 2404c52..0000000 --- a/samples/generated_samples/batch_v1alpha_generated_batch_service_get_task_sync_657a0e4c.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTask -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_GetTask_sync_657a0e4c] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -def sample_get_task(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.GetTaskRequest( - name="name_value", - ) - - # Make the request - response = client.get_task(request=request) - - # Handle the response - print(response) - -# [END batch_v1alpha_generated_BatchService_GetTask_sync_657a0e4c] diff --git a/samples/generated_samples/batch_v1alpha_generated_batch_service_list_jobs_sync_21301b63.py b/samples/generated_samples/batch_v1alpha_generated_batch_service_list_jobs_sync_21301b63.py deleted file mode 100644 index 799f233..0000000 --- a/samples/generated_samples/batch_v1alpha_generated_batch_service_list_jobs_sync_21301b63.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_ListJobs_sync_21301b63] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -def sample_list_jobs(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.ListJobsRequest( - ) - - # Make the request - page_result = client.list_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END batch_v1alpha_generated_BatchService_ListJobs_sync_21301b63] diff --git a/samples/generated_samples/batch_v1alpha_generated_batch_service_list_jobs_sync_830dac29.py b/samples/generated_samples/batch_v1alpha_generated_batch_service_list_jobs_sync_830dac29.py deleted file mode 100644 index 03cb615..0000000 --- a/samples/generated_samples/batch_v1alpha_generated_batch_service_list_jobs_sync_830dac29.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_ListJobs_sync_830dac29] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -def sample_list_jobs(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.ListJobsRequest( - ) - - # Make the request - page_result = client.list_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END batch_v1alpha_generated_BatchService_ListJobs_sync_830dac29] diff --git a/samples/generated_samples/batch_v1alpha_generated_batch_service_list_tasks_sync_97ddc579.py b/samples/generated_samples/batch_v1alpha_generated_batch_service_list_tasks_sync_97ddc579.py deleted file mode 100644 index 8c0ac9e..0000000 --- a/samples/generated_samples/batch_v1alpha_generated_batch_service_list_tasks_sync_97ddc579.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTasks -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_ListTasks_sync_97ddc579] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -def sample_list_tasks(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.ListTasksRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tasks(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END batch_v1alpha_generated_BatchService_ListTasks_sync_97ddc579] diff --git a/samples/generated_samples/batch_v1alpha_generated_batch_service_list_tasks_sync_af95f032.py b/samples/generated_samples/batch_v1alpha_generated_batch_service_list_tasks_sync_af95f032.py deleted file mode 100644 index c584fda..0000000 --- a/samples/generated_samples/batch_v1alpha_generated_batch_service_list_tasks_sync_af95f032.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTasks -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_ListTasks_sync_af95f032] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -def sample_list_tasks(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.ListTasksRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tasks(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END batch_v1alpha_generated_BatchService_ListTasks_sync_af95f032] diff --git a/samples/generated_samples/snippet_metadata_batch_v1.json b/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json similarity index 65% rename from samples/generated_samples/snippet_metadata_batch_v1.json rename to samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json index d9597c0..86c9432 100644 --- a/samples/generated_samples/snippet_metadata_batch_v1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json @@ -7,7 +7,8 @@ } ], "language": "PYTHON", - "name": "google-cloud-batch" + "name": "google-cloud-batch", + "version": "0.5.0" }, "snippets": [ { @@ -149,7 +150,7 @@ "shortName": "create_job" }, "description": "Sample for CreateJob", - "file": "batch_v1_generated_batch_service_create_job_sync_6f38dd76.py", + "file": "batch_v1_generated_batch_service_create_job_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", "regionTag": "batch_v1_generated_BatchService_CreateJob_sync", @@ -185,95 +186,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "batch_v1_generated_batch_service_create_job_sync_6f38dd76.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1.BatchServiceClient.create_job", - "method": { - "fullName": "google.cloud.batch.v1.BatchService.CreateJob", - "service": { - "fullName": "google.cloud.batch.v1.BatchService", - "shortName": "BatchService" - }, - "shortName": "CreateJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1.types.CreateJobRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "job", - "type": "google.cloud.batch_v1.types.Job" - }, - { - "name": "job_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, 
- { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.batch_v1.types.Job", - "shortName": "create_job" - }, - "description": "Sample for CreateJob", - "file": "batch_v1_generated_batch_service_create_job_sync_2bc66fef.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1_generated_BatchService_CreateJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1_generated_batch_service_create_job_sync_2bc66fef.py" + "title": "batch_v1_generated_batch_service_create_job_sync.py" }, { "canonical": true, @@ -398,87 +311,7 @@ "shortName": "delete_job" }, "description": "Sample for DeleteJob", - "file": "batch_v1_generated_batch_service_delete_job_sync_0196009c.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1_generated_BatchService_DeleteJob_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1_generated_batch_service_delete_job_sync_0196009c.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1.BatchServiceClient.delete_job", - "method": { - "fullName": "google.cloud.batch.v1.BatchService.DeleteJob", - "service": { - "fullName": "google.cloud.batch.v1.BatchService", - "shortName": "BatchService" - }, - "shortName": "DeleteJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1.types.DeleteJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_job" - }, - "description": "Sample for DeleteJob", - "file": "batch_v1_generated_batch_service_delete_job_sync_91684552.py", + "file": "batch_v1_generated_batch_service_delete_job_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", "regionTag": "batch_v1_generated_BatchService_DeleteJob_sync", @@ -514,7 +347,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "batch_v1_generated_batch_service_delete_job_sync_91684552.py" + "title": "batch_v1_generated_batch_service_delete_job_sync.py" }, { "canonical": true, @@ -639,87 +472,7 @@ "shortName": "get_job" }, "description": "Sample for GetJob", - "file": "batch_v1_generated_batch_service_get_job_sync_b06cc31b.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1_generated_BatchService_GetJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 
40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1_generated_batch_service_get_job_sync_b06cc31b.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1.BatchServiceClient.get_job", - "method": { - "fullName": "google.cloud.batch.v1.BatchService.GetJob", - "service": { - "fullName": "google.cloud.batch.v1.BatchService", - "shortName": "BatchService" - }, - "shortName": "GetJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1.types.GetJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.batch_v1.types.Job", - "shortName": "get_job" - }, - "description": "Sample for GetJob", - "file": "batch_v1_generated_batch_service_get_job_sync_97ad7e29.py", + "file": "batch_v1_generated_batch_service_get_job_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", "regionTag": "batch_v1_generated_BatchService_GetJob_sync", @@ -755,7 +508,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "batch_v1_generated_batch_service_get_job_sync_97ad7e29.py" + "title": "batch_v1_generated_batch_service_get_job_sync.py" }, { "canonical": true, @@ -880,7 +633,7 @@ "shortName": "get_task" }, "description": "Sample for GetTask", - "file": "batch_v1_generated_batch_service_get_task_sync_fa6ae8a7.py", + "file": "batch_v1_generated_batch_service_get_task_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", "regionTag": "batch_v1_generated_BatchService_GetTask_sync", @@ -916,87 +669,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "batch_v1_generated_batch_service_get_task_sync_fa6ae8a7.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1.BatchServiceClient.get_task", - "method": { - "fullName": "google.cloud.batch.v1.BatchService.GetTask", - "service": { - "fullName": "google.cloud.batch.v1.BatchService", - "shortName": "BatchService" - }, - "shortName": "GetTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1.types.GetTaskRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.batch_v1.types.Task", - "shortName": "get_task" - }, - "description": "Sample for GetTask", - "file": "batch_v1_generated_batch_service_get_task_sync_bbff024f.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1_generated_BatchService_GetTask_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - 
"end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1_generated_batch_service_get_task_sync_bbff024f.py" + "title": "batch_v1_generated_batch_service_get_task_sync.py" }, { "canonical": true, @@ -1121,87 +794,7 @@ "shortName": "list_jobs" }, "description": "Sample for ListJobs", - "file": "batch_v1_generated_batch_service_list_jobs_sync_2430eb9c.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1_generated_BatchService_ListJobs_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1_generated_batch_service_list_jobs_sync_2430eb9c.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1.BatchServiceClient.list_jobs", - "method": { - "fullName": "google.cloud.batch.v1.BatchService.ListJobs", - "service": { - "fullName": "google.cloud.batch.v1.BatchService", - "shortName": "BatchService" - }, - "shortName": "ListJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1.types.ListJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.batch_v1.services.batch_service.pagers.ListJobsPager", - "shortName": "list_jobs" - }, - "description": "Sample for ListJobs", - "file": "batch_v1_generated_batch_service_list_jobs_sync_d2310594.py", + "file": "batch_v1_generated_batch_service_list_jobs_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", "regionTag": "batch_v1_generated_BatchService_ListJobs_sync", @@ -1237,7 +830,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "batch_v1_generated_batch_service_list_jobs_sync_d2310594.py" + "title": "batch_v1_generated_batch_service_list_jobs_sync.py" }, { "canonical": true, @@ -1362,87 +955,7 @@ "shortName": "list_tasks" }, "description": "Sample for ListTasks", - "file": "batch_v1_generated_batch_service_list_tasks_sync_9b3f04d1.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1_generated_BatchService_ListTasks_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1_generated_batch_service_list_tasks_sync_9b3f04d1.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1.BatchServiceClient.list_tasks", - "method": { - "fullName": "google.cloud.batch.v1.BatchService.ListTasks", - "service": { - "fullName": "google.cloud.batch.v1.BatchService", - "shortName": "BatchService" - }, - 
"shortName": "ListTasks" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1.types.ListTasksRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.batch_v1.services.batch_service.pagers.ListTasksPager", - "shortName": "list_tasks" - }, - "description": "Sample for ListTasks", - "file": "batch_v1_generated_batch_service_list_tasks_sync_adf409cc.py", + "file": "batch_v1_generated_batch_service_list_tasks_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", "regionTag": "batch_v1_generated_BatchService_ListTasks_sync", @@ -1478,7 +991,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "batch_v1_generated_batch_service_list_tasks_sync_adf409cc.py" + "title": "batch_v1_generated_batch_service_list_tasks_sync.py" } ] } diff --git a/samples/generated_samples/snippet_metadata_batch_v1alpha.json b/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json similarity index 66% rename from samples/generated_samples/snippet_metadata_batch_v1alpha.json rename to samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json index 7966615..336b067 100644 --- a/samples/generated_samples/snippet_metadata_batch_v1alpha.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json @@ -7,7 +7,8 @@ } ], "language": "PYTHON", - "name": "google-cloud-batch" + "name": "google-cloud-batch", + "version": "0.5.0" }, "snippets": [ { @@ -149,7 +150,7 @@ "shortName": "create_job" }, "description": "Sample for CreateJob", - "file": "batch_v1alpha_generated_batch_service_create_job_sync_ee80cfe8.py", + "file": "batch_v1alpha_generated_batch_service_create_job_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", "regionTag": "batch_v1alpha_generated_BatchService_CreateJob_sync", @@ -185,95 +186,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "batch_v1alpha_generated_batch_service_create_job_sync_ee80cfe8.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient.create_job", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.CreateJob", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "CreateJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.CreateJobRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "job", - "type": "google.cloud.batch_v1alpha.types.Job" - }, - { - "name": "job_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.batch_v1alpha.types.Job", - "shortName": "create_job" - }, - "description": "Sample for CreateJob", - "file": "batch_v1alpha_generated_batch_service_create_job_sync_eb4fa717.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_CreateJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": 
"CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_create_job_sync_eb4fa717.py" + "title": "batch_v1alpha_generated_batch_service_create_job_sync.py" }, { "canonical": true, @@ -398,87 +311,7 @@ "shortName": "delete_job" }, "description": "Sample for DeleteJob", - "file": "batch_v1alpha_generated_batch_service_delete_job_sync_affefd11.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_DeleteJob_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_delete_job_sync_affefd11.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient.delete_job", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.DeleteJob", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "DeleteJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.DeleteJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_job" - }, - "description": "Sample for DeleteJob", - "file": "batch_v1alpha_generated_batch_service_delete_job_sync_18fb9154.py", + "file": "batch_v1alpha_generated_batch_service_delete_job_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", "regionTag": "batch_v1alpha_generated_BatchService_DeleteJob_sync", @@ -514,7 +347,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "batch_v1alpha_generated_batch_service_delete_job_sync_18fb9154.py" + "title": "batch_v1alpha_generated_batch_service_delete_job_sync.py" }, { "canonical": true, @@ -639,87 +472,7 @@ "shortName": "get_job" }, "description": "Sample for GetJob", - "file": "batch_v1alpha_generated_batch_service_get_job_sync_17387e0a.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_GetJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_get_job_sync_17387e0a.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - 
"fullName": "google.cloud.batch_v1alpha.BatchServiceClient.get_job", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.GetJob", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "GetJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.GetJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.batch_v1alpha.types.Job", - "shortName": "get_job" - }, - "description": "Sample for GetJob", - "file": "batch_v1alpha_generated_batch_service_get_job_sync_1014e8c7.py", + "file": "batch_v1alpha_generated_batch_service_get_job_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", "regionTag": "batch_v1alpha_generated_BatchService_GetJob_sync", @@ -755,7 +508,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "batch_v1alpha_generated_batch_service_get_job_sync_1014e8c7.py" + "title": "batch_v1alpha_generated_batch_service_get_job_sync.py" }, { "canonical": true, @@ -880,7 +633,7 @@ "shortName": "get_task" }, "description": "Sample for GetTask", - "file": "batch_v1alpha_generated_batch_service_get_task_sync_657a0e4c.py", + "file": "batch_v1alpha_generated_batch_service_get_task_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", "regionTag": "batch_v1alpha_generated_BatchService_GetTask_sync", @@ -916,87 +669,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "batch_v1alpha_generated_batch_service_get_task_sync_657a0e4c.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient.get_task", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.GetTask", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "GetTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.GetTaskRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.batch_v1alpha.types.Task", - "shortName": "get_task" - }, - "description": "Sample for GetTask", - "file": "batch_v1alpha_generated_batch_service_get_task_sync_29b69ea9.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_GetTask_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_get_task_sync_29b69ea9.py" + "title": "batch_v1alpha_generated_batch_service_get_task_sync.py" }, { "canonical": true, @@ -1121,87 +794,7 @@ "shortName": "list_jobs" }, "description": "Sample for ListJobs", - "file": 
"batch_v1alpha_generated_batch_service_list_jobs_sync_830dac29.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_ListJobs_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_list_jobs_sync_830dac29.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient.list_jobs", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.ListJobs", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "ListJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.ListJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.batch_v1alpha.services.batch_service.pagers.ListJobsPager", - "shortName": "list_jobs" - }, - "description": "Sample for ListJobs", - "file": "batch_v1alpha_generated_batch_service_list_jobs_sync_21301b63.py", + "file": "batch_v1alpha_generated_batch_service_list_jobs_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", "regionTag": "batch_v1alpha_generated_BatchService_ListJobs_sync", @@ -1237,7 +830,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "batch_v1alpha_generated_batch_service_list_jobs_sync_21301b63.py" + "title": "batch_v1alpha_generated_batch_service_list_jobs_sync.py" }, { "canonical": true, @@ -1362,87 +955,7 @@ "shortName": "list_tasks" }, "description": "Sample for ListTasks", - "file": "batch_v1alpha_generated_batch_service_list_tasks_sync_af95f032.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_ListTasks_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_list_tasks_sync_af95f032.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient.list_tasks", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.ListTasks", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "ListTasks" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.ListTasksRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.batch_v1alpha.services.batch_service.pagers.ListTasksPager", - "shortName": "list_tasks" - }, - "description": "Sample for ListTasks", - "file": "batch_v1alpha_generated_batch_service_list_tasks_sync_97ddc579.py", + "file": "batch_v1alpha_generated_batch_service_list_tasks_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", "regionTag": "batch_v1alpha_generated_BatchService_ListTasks_sync", @@ -1478,7 +991,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "batch_v1alpha_generated_batch_service_list_tasks_sync_97ddc579.py" + "title": "batch_v1alpha_generated_batch_service_list_tasks_sync.py" } ] } diff --git a/samples/noxfile.py b/samples/noxfile.py deleted file mode 100644 index b053ca5..0000000 --- a/samples/noxfile.py +++ /dev/null @@ -1,313 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function - -import glob -import os -from pathlib import Path -import sys -from typing import Callable, Dict, List, Optional - -import nox - - -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING -# DO NOT EDIT THIS FILE EVER! -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING - -BLACK_VERSION = "black==22.3.0" -ISORT_VERSION = "isort==5.10.1" - -# Copy `noxfile_config.py` to your directory and modify it instead. - -# `TEST_CONFIG` dict is a configuration hook that allows users to -# modify the test configurations. The values here should be in sync -# with `noxfile_config.py`. Users will copy `noxfile_config.py` into -# their directory and modify it. - -TEST_CONFIG = { - # You can opt out from the test for specific Python versions. - "ignored_versions": [], - # Old samples are opted out of enforcing Python type hints - # All new samples should feature them - "enforce_type_hints": False, - # An envvar key for determining the project id to use. Change it - # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a - # build specific Cloud project. You can also use your own string - # to use your own Cloud project. - "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", - # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - # If you need to use a specific version of pip, - # change pip_version_override to the string representation - # of the version number, for example, "20.2.4" - "pip_version_override": None, - # A dictionary you want to inject into your test. Don't put any - # secrets here. These values will override predefined values. - "envs": {}, -} - - -try: - # Ensure we can import noxfile_config in the project's directory. 
- sys.path.append(".") - from noxfile_config import TEST_CONFIG_OVERRIDE -except ImportError as e: - print("No user noxfile_config found: detail: {}".format(e)) - TEST_CONFIG_OVERRIDE = {} - -# Update the TEST_CONFIG with the user supplied values. -TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) - - -def get_pytest_env_vars() -> Dict[str, str]: - """Returns a dict for pytest invocation.""" - ret = {} - - # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] - # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] - - # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) - return ret - - -# DO NOT EDIT - automatically generated. -# All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] - -# Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] - -TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) - -INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( - "True", - "true", -) - -# Error if a python version is missing -nox.options.error_on_missing_interpreters = True - -# -# Style Checks -# - - -def _determine_local_import_names(start_dir: str) -> List[str]: - """Determines all import names that should be considered "local". - - This is used when running the linter to insure that import order is - properly checked. - """ - file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] - return [ - basename - for basename, extension in file_ext_pairs - if extension == ".py" - or os.path.isdir(os.path.join(start_dir, basename)) - and basename not in ("__pycache__") - ] - - -# Linting with flake8. -# -# We ignore the following rules: -# E203: whitespace before ‘:’ -# E266: too many leading ‘#’ for block comment -# E501: line too long -# I202: Additional newline in a section of imports -# -# We also need to specify the rules which are ignored by default: -# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] -FLAKE8_COMMON_ARGS = [ - "--show-source", - "--builtin=gettext", - "--max-complexity=20", - "--import-order-style=google", - "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", - "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", - "--max-line-length=88", -] - - -@nox.session -def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG["enforce_type_hints"]: - session.install("flake8", "flake8-import-order") - else: - session.install("flake8", "flake8-import-order", "flake8-annotations") - - local_names = _determine_local_import_names(".") - args = FLAKE8_COMMON_ARGS + [ - "--application-import-names", - ",".join(local_names), - ".", - ] - session.run("flake8", *args) - - -# -# Black -# - - -@nox.session -def blacken(session: nox.sessions.Session) -> None: - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - python_files = [path for path in os.listdir(".") if path.endswith(".py")] - - session.run("black", *python_files) - - -# -# format = isort + black -# - - -@nox.session -def format(session: nox.sessions.Session) -> None: - """ - Run isort to sort imports. Then run black - to format code to uniform standard. - """ - session.install(BLACK_VERSION, ISORT_VERSION) - python_files = [path for path in os.listdir(".") if path.endswith(".py")] - - # Use the --fss option to sort imports using strict alphabetical order. 
- # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections - session.run("isort", "--fss", *python_files) - session.run("black", *python_files) - - -# -# Sample Tests -# - - -PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] - - -def _session_tests( - session: nox.sessions.Session, post_install: Callable = None -) -> None: - # check for presence of tests - test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob( - "**/test_*.py", recursive=True - ) - test_list.extend(glob.glob("**/tests", recursive=True)) - - if len(test_list) == 0: - print("No tests found, skipping directory.") - return - - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - concurrent_args = [] - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - with open("requirements.txt") as rfile: - packages = rfile.read() - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - with open("requirements-test.txt") as rtfile: - packages += rtfile.read() - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - if "pytest-parallel" in packages: - concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"]) - elif "pytest-xdist" in packages: - concurrent_args.extend(["-n", "auto"]) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. - # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) - - -@nox.session(python=ALL_VERSIONS) -def py(session: nox.sessions.Session) -> None: - """Runs py.test for a sample using the specified version of Python.""" - if session.python in TESTED_VERSIONS: - _session_tests(session) - else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) - - -# -# Readmegen -# - - -def _get_repo_root() -> Optional[str]: - """Returns the root folder of the project.""" - # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
- p = Path(os.getcwd()) - for i in range(10): - if p is None: - break - if Path(p / ".git").exists(): - return str(p) - # .git is not available in repos cloned via Cloud Build - # setup.py is always in the library's root, so use that instead - # https://github.com/googleapis/synthtool/issues/792 - if Path(p / "setup.py").exists(): - return str(p) - p = p.parent - raise Exception("Unable to detect repository root.") - - -GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) - - -@nox.session -@nox.parametrize("path", GENERATED_READMES) -def readmegen(session: nox.sessions.Session, path: str) -> None: - """(Re-)generates the readme for a sample.""" - session.install("jinja2", "pyyaml") - dir_ = os.path.dirname(path) - - if os.path.exists(os.path.join(dir_, "requirements.txt")): - session.install("-r", os.path.join(dir_, "requirements.txt")) - - in_file = os.path.join(dir_, "README.rst.in") - session.run( - "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file - ) diff --git a/samples/noxfile_config.py b/samples/noxfile_config.py deleted file mode 100644 index d27b90b..0000000 --- a/samples/noxfile_config.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -TEST_CONFIG_OVERRIDE = { - "gcloud_project_env": "BUILD_SPECIFIC_GCLOUD_PROJECT", -} diff --git a/samples/requirements-test.txt b/samples/requirements-test.txt deleted file mode 100644 index 586e344..0000000 --- a/samples/requirements-test.txt +++ /dev/null @@ -1,4 +0,0 @@ -pytest==7.2.0 -google-cloud-compute==1.6.1 -google-cloud-resource-manager==1.6.3 -google-cloud-storage==2.5.0 diff --git a/samples/requirements.txt b/samples/requirements.txt deleted file mode 100644 index 06ae926..0000000 --- a/samples/requirements.txt +++ /dev/null @@ -1,4 +0,0 @@ -isort==5.10.1 -black==22.10.0 -google-cloud-batch==0.4.0 -google-cloud-logging==3.2.5 diff --git a/samples/snippets/__init__.py b/samples/snippets/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/samples/snippets/create/__init__.py b/samples/snippets/create/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/samples/snippets/create/create_with_container_no_mounting.py b/samples/snippets/create/create_with_container_no_mounting.py deleted file mode 100644 index fd7f655..0000000 --- a/samples/snippets/create/create_with_container_no_mounting.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# [START batch_create_container_job] -from google.cloud import batch_v1 - - -def create_container_job(project_id: str, region: str, job_name: str) -> batch_v1.Job: - """ - This method shows how to create a sample Batch Job that will run - a simple command inside a container on Cloud Compute instances. - - Args: - project_id: project ID or project number of the Cloud project you want to use. - region: name of the region you want to use to run the job. Regions that are - available for Batch are listed on: https://cloud.google.com/batch/docs/get-started#locations - job_name: the name of the job that will be created. - It needs to be unique for each project and region pair. - - Returns: - A job object representing the job created. - """ - client = batch_v1.BatchServiceClient() - - # Define what will be done as part of the job. - runnable = batch_v1.Runnable() - runnable.container = batch_v1.Runnable.Container() - runnable.container.image_uri = "gcr.io/google-containers/busybox" - runnable.container.entrypoint = "/bin/sh" - runnable.container.commands = ["-c", "echo Hello world! This is task ${BATCH_TASK_INDEX}. This job has a total of ${BATCH_TASK_COUNT} tasks."] - - # Jobs can be divided into tasks. In this case, we have only one task. - task = batch_v1.TaskSpec() - task.runnables = [runnable] - - # We can specify what resources are requested by each task. - resources = batch_v1.ComputeResource() - resources.cpu_milli = 2000 # in milliseconds per cpu-second. This means the task requires 2 whole CPUs. - resources.memory_mib = 16 # in MiB - task.compute_resource = resources - - task.max_retry_count = 2 - task.max_run_duration = "3600s" - - # Tasks are grouped inside a job using TaskGroups. - # Currently, it's possible to have only one task group. - group = batch_v1.TaskGroup() - group.task_count = 4 - group.task_spec = task - - # Policies are used to define on what kind of virtual machines the tasks will run on. - # In this case, we tell the system to use "e2-standard-4" machine type. - # Read more about machine types here: https://cloud.google.com/compute/docs/machine-types - policy = batch_v1.AllocationPolicy.InstancePolicy() - policy.machine_type = "e2-standard-4" - instances = batch_v1.AllocationPolicy.InstancePolicyOrTemplate() - instances.policy = policy - allocation_policy = batch_v1.AllocationPolicy() - allocation_policy.instances = [instances] - - job = batch_v1.Job() - job.task_groups = [group] - job.allocation_policy = allocation_policy - job.labels = {"env": "testing", "type": "container"} - # We use Cloud Logging as it's an out of the box available option - job.logs_policy = batch_v1.LogsPolicy() - job.logs_policy.destination = batch_v1.LogsPolicy.Destination.CLOUD_LOGGING - - create_request = batch_v1.CreateJobRequest() - create_request.job = job - create_request.job_id = job_name - # The job's parent is the region in which the job will run - create_request.parent = f"projects/{project_id}/locations/{region}" - - return client.create_job(create_request) -# [END batch_create_container_job] diff --git a/samples/snippets/create/create_with_mounted_bucket.py b/samples/snippets/create/create_with_mounted_bucket.py deleted file mode 100644 index d64821c..0000000 --- a/samples/snippets/create/create_with_mounted_bucket.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# [START batch_create_script_job_with_bucket] -from google.cloud import batch_v1 - - -def create_script_job_with_bucket(project_id: str, region: str, job_name: str, bucket_name: str) -> batch_v1.Job: - """ - This method shows how to create a sample Batch Job that will run - a simple command on Cloud Compute instances. - - Args: - project_id: project ID or project number of the Cloud project you want to use. - region: name of the region you want to use to run the job. Regions that are - available for Batch are listed on: https://cloud.google.com/batch/docs/get-started#locations - job_name: the name of the job that will be created. - It needs to be unique for each project and region pair. - bucket_name: name of the bucket to be mounted for your Job. - - Returns: - A job object representing the job created. - """ - client = batch_v1.BatchServiceClient() - - # Define what will be done as part of the job. - task = batch_v1.TaskSpec() - runnable = batch_v1.Runnable() - runnable.script = batch_v1.Runnable.Script() - runnable.script.text = "echo Hello world from task ${BATCH_TASK_INDEX}. >> /mnt/share/output_task_${BATCH_TASK_INDEX}.txt" - task.runnables = [runnable] - - gcs_bucket = batch_v1.GCS() - gcs_bucket.remote_path = bucket_name - gcs_volume = batch_v1.Volume() - gcs_volume.gcs = gcs_bucket - gcs_volume.mount_path = '/mnt/share' - task.volumes = [gcs_volume] - - # We can specify what resources are requested by each task. - resources = batch_v1.ComputeResource() - resources.cpu_milli = 500 # in milliseconds per cpu-second. This means the task requires 50% of a single CPUs. - resources.memory_mib = 16 - task.compute_resource = resources - - task.max_retry_count = 2 - task.max_run_duration = "3600s" - - # Tasks are grouped inside a job using TaskGroups. - # Currently, it's possible to have only one task group. - group = batch_v1.TaskGroup() - group.task_count = 4 - group.task_spec = task - - # Policies are used to define on what kind of virtual machines the tasks will run on. - # In this case, we tell the system to use "e2-standard-4" machine type. 
- # Read more about machine types here: https://cloud.google.com/compute/docs/machine-types - allocation_policy = batch_v1.AllocationPolicy() - policy = batch_v1.AllocationPolicy.InstancePolicy() - policy.machine_type = "e2-standard-4" - instances = batch_v1.AllocationPolicy.InstancePolicyOrTemplate() - instances.policy = policy - allocation_policy.instances = [instances] - - job = batch_v1.Job() - job.task_groups = [group] - job.allocation_policy = allocation_policy - job.labels = {"env": "testing", "type": "script", "mount": "bucket"} - # We use Cloud Logging as it's an out of the box available option - job.logs_policy = batch_v1.LogsPolicy() - job.logs_policy.destination = batch_v1.LogsPolicy.Destination.CLOUD_LOGGING - - create_request = batch_v1.CreateJobRequest() - create_request.job = job - create_request.job_id = job_name - # The job's parent is the region in which the job will run - create_request.parent = f"projects/{project_id}/locations/{region}" - - return client.create_job(create_request) -# [END batch_create_script_job_with_bucket] diff --git a/samples/snippets/create/create_with_script_no_mounting.py b/samples/snippets/create/create_with_script_no_mounting.py deleted file mode 100644 index 87a1dfe..0000000 --- a/samples/snippets/create/create_with_script_no_mounting.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# [START batch_create_script_job] -from google.cloud import batch_v1 - - -def create_script_job(project_id: str, region: str, job_name: str) -> batch_v1.Job: - """ - This method shows how to create a sample Batch Job that will run - a simple command on Cloud Compute instances. - - Args: - project_id: project ID or project number of the Cloud project you want to use. - region: name of the region you want to use to run the job. Regions that are - available for Batch are listed on: https://cloud.google.com/batch/docs/get-started#locations - job_name: the name of the job that will be created. - It needs to be unique for each project and region pair. - - Returns: - A job object representing the job created. - """ - client = batch_v1.BatchServiceClient() - - # Define what will be done as part of the job. - task = batch_v1.TaskSpec() - runnable = batch_v1.Runnable() - runnable.script = batch_v1.Runnable.Script() - runnable.script.text = "echo Hello world! This is task ${BATCH_TASK_INDEX}. This job has a total of ${BATCH_TASK_COUNT} tasks." - # You can also run a script from a file. Just remember, that needs to be a script that's - # already on the VM that will be running the job. Using runnable.script.text and runnable.script.path is mutually - # exclusive. - # runnable.script.path = '/tmp/test.sh' - task.runnables = [runnable] - - # We can specify what resources are requested by each task. - resources = batch_v1.ComputeResource() - resources.cpu_milli = 2000 # in milliseconds per cpu-second. This means the task requires 2 whole CPUs. 
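    # (Editor's note, hedged: in the Batch API, cpu_milli is a milli-CPU count, so the
    #  2000 above requests two whole vCPUs and 500 would request half a vCPU;
    #  memory_mib below is expressed in MiB.)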
- resources.memory_mib = 16 - task.compute_resource = resources - - task.max_retry_count = 2 - task.max_run_duration = "3600s" - - # Tasks are grouped inside a job using TaskGroups. - # Currently, it's possible to have only one task group. - group = batch_v1.TaskGroup() - group.task_count = 4 - group.task_spec = task - - # Policies are used to define on what kind of virtual machines the tasks will run on. - # In this case, we tell the system to use "e2-standard-4" machine type. - # Read more about machine types here: https://cloud.google.com/compute/docs/machine-types - allocation_policy = batch_v1.AllocationPolicy() - policy = batch_v1.AllocationPolicy.InstancePolicy() - policy.machine_type = "e2-standard-4" - instances = batch_v1.AllocationPolicy.InstancePolicyOrTemplate() - instances.policy = policy - allocation_policy.instances = [instances] - - job = batch_v1.Job() - job.task_groups = [group] - job.allocation_policy = allocation_policy - job.labels = {"env": "testing", "type": "script"} - # We use Cloud Logging as it's an out of the box available option - job.logs_policy = batch_v1.LogsPolicy() - job.logs_policy.destination = batch_v1.LogsPolicy.Destination.CLOUD_LOGGING - - create_request = batch_v1.CreateJobRequest() - create_request.job = job - create_request.job_id = job_name - # The job's parent is the region in which the job will run - create_request.parent = f"projects/{project_id}/locations/{region}" - - return client.create_job(create_request) -# [END batch_create_script_job] diff --git a/samples/snippets/create/create_with_template.py b/samples/snippets/create/create_with_template.py deleted file mode 100644 index c37a94f..0000000 --- a/samples/snippets/create/create_with_template.py +++ /dev/null @@ -1,88 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# [START batch_create_job_with_template] -from google.cloud import batch_v1 - - -def create_script_job_with_template(project_id: str, region: str, job_name: str, template_link: str) -> batch_v1.Job: - """ - This method shows how to create a sample Batch Job that will run - a simple command on Cloud Compute instances created using a provided Template. - - Args: - project_id: project ID or project number of the Cloud project you want to use. - region: name of the region you want to use to run the job. Regions that are - available for Batch are listed on: https://cloud.google.com/batch/docs/get-started#locations - job_name: the name of the job that will be created. - It needs to be unique for each project and region pair. - template_link: a link to an existing Instance Template. Acceptable formats: - * "projects/{project_id}/global/instanceTemplates/{template_name}" - * "{template_name}" - if the template is defined in the same project as used to create the Job. - - Returns: - A job object representing the job created. - """ - client = batch_v1.BatchServiceClient() - - # Define what will be done as part of the job. 
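    # (Editor's note, a hedged summary of the object model these samples share: a Runnable
    #  carries the script or container to execute, a TaskSpec bundles runnables with compute
    #  resources and retry limits, a TaskGroup sets how many tasks run, and the Job combines
    #  the task group with an allocation policy and a logs policy before being submitted
    #  via a CreateJobRequest.)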
- task = batch_v1.TaskSpec() - runnable = batch_v1.Runnable() - runnable.script = batch_v1.Runnable.Script() - runnable.script.text = "echo Hello world! This is task ${BATCH_TASK_INDEX}. This job has a total of ${BATCH_TASK_COUNT} tasks." - # You can also run a script from a file. Just remember, that needs to be a script that's - # already on the VM that will be running the job. Using runnable.script.text and runnable.script.path is mutually - # exclusive. - # runnable.script.path = '/tmp/test.sh' - task.runnables = [runnable] - - # We can specify what resources are requested by each task. - resources = batch_v1.ComputeResource() - resources.cpu_milli = 2000 # in milliseconds per cpu-second. This means the task requires 2 whole CPUs. - resources.memory_mib = 16 - task.compute_resource = resources - - task.max_retry_count = 2 - task.max_run_duration = "3600s" - - # Tasks are grouped inside a job using TaskGroups. - # Currently, it's possible to have only one task group. - group = batch_v1.TaskGroup() - group.task_count = 4 - group.task_spec = task - - # Policies are used to define on what kind of virtual machines the tasks will run on. - # In this case, we tell the system to use an instance template that defines all the - # required parameters. - allocation_policy = batch_v1.AllocationPolicy() - instances = batch_v1.AllocationPolicy.InstancePolicyOrTemplate() - instances.instance_template = template_link - allocation_policy.instances = [instances] - - job = batch_v1.Job() - job.task_groups = [group] - job.allocation_policy = allocation_policy - job.labels = {"env": "testing", "type": "script"} - # We use Cloud Logging as it's an out of the box available option - job.logs_policy = batch_v1.LogsPolicy() - job.logs_policy.destination = batch_v1.LogsPolicy.Destination.CLOUD_LOGGING - - create_request = batch_v1.CreateJobRequest() - create_request.job = job - create_request.job_id = job_name - # The job's parent is the region in which the job will run - create_request.parent = f"projects/{project_id}/locations/{region}" - - return client.create_job(create_request) -# [END batch_create_job_with_template] diff --git a/samples/snippets/delete/delete_job.py b/samples/snippets/delete/delete_job.py deleted file mode 100644 index f4ebdcd..0000000 --- a/samples/snippets/delete/delete_job.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# [START batch_delete_job] -from google.api_core.operation import Operation - -from google.cloud import batch_v1 - - -def delete_job(project_id: str, region: str, job_name: str) -> Operation: - """ - Triggers the deletion of a Job. - - Args: - project_id: project ID or project number of the Cloud project you want to use. - region: name of the region hosts the job. - job_name: the name of the job that you want to delete. - - Returns: - An operation object related to the deletion. You can call `.result()` - on it to wait for its completion. 
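    Example (editor-added, hedged sketch; the project, region and job names are placeholders):
        operation = delete_job("my-project", "us-central1", "my-job")
        operation.result()  # blocks until the deletion has finished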
- """ - client = batch_v1.BatchServiceClient() - - return client.delete_job(name=f"projects/{project_id}/locations/{region}/jobs/{job_name}") -# [END batch_delete_job] diff --git a/samples/snippets/get/get_job.py b/samples/snippets/get/get_job.py deleted file mode 100644 index 8dff737..0000000 --- a/samples/snippets/get/get_job.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# [START batch_get_job] - -from google.cloud import batch_v1 - - -def get_job(project_id: str, region: str, job_name: str) -> batch_v1.Job: - """ - Retrieve information about a Batch Job. - - Args: - project_id: project ID or project number of the Cloud project you want to use. - region: name of the region hosts the job. - job_name: the name of the job you want to retrieve information about. - - Returns: - A Job object representing the specified job. - """ - client = batch_v1.BatchServiceClient() - - return client.get_job(name=f"projects/{project_id}/locations/{region}/jobs/{job_name}") -# [END batch_get_job] diff --git a/samples/snippets/get/get_task.py b/samples/snippets/get/get_task.py deleted file mode 100644 index ea5fc5c..0000000 --- a/samples/snippets/get/get_task.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# [START batch_get_task] - -from google.cloud import batch_v1 - - -def get_task(project_id: str, region: str, job_name: str, group_name: str, task_number: int) -> batch_v1.Task: - """ - Retrieve information about a Task. - - Args: - project_id: project ID or project number of the Cloud project you want to use. - region: name of the region hosts the job. - job_name: the name of the job you want to retrieve information about. - group_name: the name of the group that owns the task you want to check. Usually it's `group0`. - task_number: number of the task you want to look up. - - Returns: - A Task object representing the specified task. 
- """ - client = batch_v1.BatchServiceClient() - - return client.get_task(name=f"projects/{project_id}/locations/{region}/jobs/{job_name}" - f"/taskGroups/{group_name}/tasks/{task_number}") -# [END batch_get_task] diff --git a/samples/snippets/list/list_jobs.py b/samples/snippets/list/list_jobs.py deleted file mode 100644 index e52f4de..0000000 --- a/samples/snippets/list/list_jobs.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# [START batch_list_jobs] -from typing import Iterable - -from google.cloud import batch_v1 - - -def list_jobs(project_id: str, region: str) -> Iterable[batch_v1.Job]: - """ - Get a list of all jobs defined in given region. - - Args: - project_id: project ID or project number of the Cloud project you want to use. - region: name of the region hosting the jobs. - - Returns: - An iterable collection of Job object. - """ - client = batch_v1.BatchServiceClient() - - return client.list_jobs(parent=f"projects/{project_id}/locations/{region}") -# [END batch_list_jobs] diff --git a/samples/snippets/list/list_tasks.py b/samples/snippets/list/list_tasks.py deleted file mode 100644 index 9ef6674..0000000 --- a/samples/snippets/list/list_tasks.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# [START batch_list_tasks] -from typing import Iterable - -from google.cloud import batch_v1 - - -def list_tasks(project_id: str, region: str, job_name: str, group_name: str) -> Iterable[batch_v1.Task]: - """ - Get a list of all jobs defined in given region. - - Args: - project_id: project ID or project number of the Cloud project you want to use. - region: name of the region hosting the jobs. - job_name: name of the job which tasks you want to list. - group_name: name of the group of tasks. Usually it's `group0`. - - Returns: - An iterable collection of Task objects. - """ - client = batch_v1.BatchServiceClient() - - return client.list_tasks(parent=f"projects/{project_id}/locations/{region}/jobs/{job_name}/taskGroups/{group_name}") -# [END batch_list_tasks] diff --git a/samples/snippets/logs/read_job_logs.py b/samples/snippets/logs/read_job_logs.py deleted file mode 100644 index d9c227a..0000000 --- a/samples/snippets/logs/read_job_logs.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -# [START batch_job_logs] -from typing import NoReturn - -from google.cloud import batch_v1 -from google.cloud import logging - - -def print_job_logs(project_id: str, job: batch_v1.Job) -> NoReturn: - """ - Prints the log messages created by given job. - - Args: - project_id: name of the project hosting the job. - job: the job which logs you want to print. - """ - # Initialize client that will be used to send requests across threads. This - # client only needs to be created once, and can be reused for multiple requests. - log_client = logging.Client(project=project_id) - logger = log_client.logger("batch_task_logs") - - for log_entry in logger.list_entries(filter_=f"labels.job_uid={job.uid}"): - print(log_entry.payload) - -# [END batch_job_logs] diff --git a/samples/snippets/tests/__init__.py b/samples/snippets/tests/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/samples/snippets/tests/test_basics.py b/samples/snippets/tests/test_basics.py deleted file mode 100644 index 72a11f8..0000000 --- a/samples/snippets/tests/test_basics.py +++ /dev/null @@ -1,100 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import time -from typing import Callable -import uuid - -import google.auth -from google.cloud import batch_v1 -import pytest - -from ..create.create_with_container_no_mounting import create_container_job -from ..create.create_with_script_no_mounting import create_script_job - -from ..delete.delete_job import delete_job -from ..get.get_job import get_job -from ..get.get_task import get_task -from ..list.list_jobs import list_jobs -from ..list.list_tasks import list_tasks -from ..logs.read_job_logs import print_job_logs - -PROJECT = google.auth.default()[1] -REGION = 'europe-north1' - -TIMEOUT = 600 # 10 minutes - -WAIT_STATES = { - batch_v1.JobStatus.State.STATE_UNSPECIFIED, - batch_v1.JobStatus.State.QUEUED, - batch_v1.JobStatus.State.RUNNING, - batch_v1.JobStatus.State.SCHEDULED, - batch_v1.JobStatus.State.DELETION_IN_PROGRESS -} - - -@pytest.fixture -def job_name(): - return f"test-job-{uuid.uuid4().hex[:10]}" - - -def _test_body(test_job: batch_v1.Job, additional_test: Callable = None): - start_time = time.time() - try: - while test_job.status.state in WAIT_STATES: - if time.time() - start_time > TIMEOUT: - pytest.fail("Timed out while waiting for job to complete!") - test_job = get_job(PROJECT, REGION, test_job.name.rsplit('/', maxsplit=1)[1]) - time.sleep(5) - - assert test_job.status.state == batch_v1.JobStatus.State.SUCCEEDED - - for job in list_jobs(PROJECT, REGION): - if test_job.uid == job.uid: - break - else: - pytest.fail(f"Couldn't find job {test_job.uid} on the list of jobs.") - - if additional_test: - additional_test() - finally: - delete_job(PROJECT, REGION, test_job.name.rsplit('/', maxsplit=1)[1]).result() - - for job in list_jobs(PROJECT, REGION): - if job.uid == test_job.uid: - pytest.fail("The test job should be deleted at this point!") - - -def _check_tasks(job_name): - tasks = list_tasks(PROJECT, REGION, job_name, 'group0') - assert len(list(tasks)) == 4 - for i in range(4): - assert get_task(PROJECT, REGION, job_name, 'group0', i) is not None - print('Tasks tested') - - -def _check_logs(job, capsys): - print_job_logs(PROJECT, job) - output = [line for line in capsys.readouterr().out.splitlines(keepends=False) if line != ""] - assert len(output) == 4 - assert all(log_msg.startswith("STDOUT") for log_msg in output) - - -def test_script_job(job_name, capsys): - job = create_script_job(PROJECT, REGION, job_name) - _test_body(job, additional_test=lambda: _check_logs(job, capsys)) - - -def test_container_job(job_name): - job = create_container_job(PROJECT, REGION, job_name) - _test_body(job, additional_test=lambda: _check_tasks(job_name)) diff --git a/samples/snippets/tests/test_bucket.py b/samples/snippets/tests/test_bucket.py deleted file mode 100644 index ad8a347..0000000 --- a/samples/snippets/tests/test_bucket.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import uuid - - -import google.auth -from google.cloud import batch_v1 -from google.cloud import storage -import pytest - -from .test_basics import _test_body -from ..create.create_with_mounted_bucket import create_script_job_with_bucket - -PROJECT = google.auth.default()[1] -REGION = 'europe-north1' - -TIMEOUT = 600 # 10 minutes - -WAIT_STATES = { - batch_v1.JobStatus.State.STATE_UNSPECIFIED, - batch_v1.JobStatus.State.QUEUED, - batch_v1.JobStatus.State.RUNNING, - batch_v1.JobStatus.State.SCHEDULED, -} - - -@pytest.fixture -def job_name(): - return f"test-job-{uuid.uuid4().hex[:10]}" - - -@pytest.fixture() -def test_bucket(): - bucket_name = f"test-bucket-{uuid.uuid4().hex[:8]}" - client = storage.Client() - client.create_bucket(bucket_name, location="eu") - - yield bucket_name - - bucket = client.get_bucket(bucket_name) - bucket.delete(force=True) - - -def _test_bucket_content(test_bucket): - client = storage.Client() - bucket = client.get_bucket(test_bucket) - - file_name_template = "output_task_{task_number}.txt" - file_content_template = "Hello world from task {task_number}.\n" - - for i in range(4): - blob = bucket.blob(file_name_template.format(task_number=i)) - content = blob.download_as_bytes().decode() - assert content == file_content_template.format(task_number=i) - - -def test_bucket_job(job_name, test_bucket): - job = create_script_job_with_bucket(PROJECT, REGION, job_name, test_bucket) - _test_body(job, lambda: _test_bucket_content(test_bucket)) diff --git a/samples/snippets/tests/test_template.py b/samples/snippets/tests/test_template.py deleted file mode 100644 index 5728117..0000000 --- a/samples/snippets/tests/test_template.py +++ /dev/null @@ -1,114 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import uuid - -import google.auth -from google.cloud import batch_v1 -from google.cloud import compute_v1 -from google.cloud import resourcemanager_v3 -import pytest - - -from .test_basics import _test_body - -from ..create.create_with_template import create_script_job_with_template - -PROJECT = google.auth.default()[1] - -PROJECT_NUMBER = resourcemanager_v3.ProjectsClient().get_project(name=f"projects/{PROJECT}").name.split("/")[1] - -REGION = 'europe-north1' - -TIMEOUT = 600 # 10 minutes - -WAIT_STATES = { - batch_v1.JobStatus.State.STATE_UNSPECIFIED, - batch_v1.JobStatus.State.QUEUED, - batch_v1.JobStatus.State.RUNNING, - batch_v1.JobStatus.State.SCHEDULED, -} - - -@pytest.fixture -def job_name(): - return f"test-job-{uuid.uuid4().hex[:10]}" - - -@pytest.fixture -def instance_template(): - disk = compute_v1.AttachedDisk() - initialize_params = compute_v1.AttachedDiskInitializeParams() - initialize_params.source_image = ( - "projects/ubuntu-os-cloud/global/images/family/ubuntu-2204-lts" - ) - initialize_params.disk_size_gb = 25 - initialize_params.disk_type = 'pd-balanced' - disk.initialize_params = initialize_params - disk.auto_delete = True - disk.boot = True - - network_interface = compute_v1.NetworkInterface() - network_interface.name = "global/networks/default" - - access = compute_v1.AccessConfig() - access.type_ = compute_v1.AccessConfig.Type.ONE_TO_ONE_NAT.name - access.name = "External NAT" - access.network_tier = access.NetworkTier.PREMIUM.name - network_interface.access_configs = [access] - - template = compute_v1.InstanceTemplate() - template.name = "test-template-" + uuid.uuid4().hex[:10] - template.properties = compute_v1.InstanceProperties() - template.properties.disks = [disk] - template.properties.machine_type = "e2-standard-16" - template.properties.network_interfaces = [network_interface] - - template.properties.scheduling = compute_v1.Scheduling() - template.properties.scheduling.on_host_maintenance = compute_v1.Scheduling.OnHostMaintenance.MIGRATE.name - template.properties.scheduling.provisioning_model = compute_v1.Scheduling.ProvisioningModel.STANDARD.name - template.properties.scheduling.automatic_restart = True - - template.properties.service_accounts = [ - { - "email": f"{PROJECT_NUMBER}-compute@developer.gserviceaccount.com", - "scopes": [ - "https://www.googleapis.com/auth/devstorage.read_only", - "https://www.googleapis.com/auth/logging.write", - "https://www.googleapis.com/auth/monitoring.write", - "https://www.googleapis.com/auth/servicecontrol", - "https://www.googleapis.com/auth/service.management.readonly", - "https://www.googleapis.com/auth/trace.append" - ] - } - ] - - template_client = compute_v1.InstanceTemplatesClient() - operation_client = compute_v1.GlobalOperationsClient() - op = template_client.insert_unary( - project=PROJECT, instance_template_resource=template - ) - operation_client.wait(project=PROJECT, operation=op.name) - - template = template_client.get(project=PROJECT, instance_template=template.name) - - yield template - - op = template_client.delete_unary(project=PROJECT, instance_template=template.name) - operation_client.wait(project=PROJECT, operation=op.name) - - -def test_template_job(job_name, instance_template): - job = create_script_job_with_template(PROJECT, REGION, job_name, instance_template.self_link) - _test_body(job) diff --git a/setup.py b/setup.py index d61f02d..3885cdb 100644 --- a/setup.py +++ b/setup.py @@ -1,33 +1,45 @@ # -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # 
Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - +# import io import os -import setuptools +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) name = "google-cloud-batch" -description = "Cloud Batch API client library" -version = "0.4.1" -release_status = "Development Status :: 4 - Beta" + + +description = "Google Cloud Batch API client library" + +version = {} +with open(os.path.join(package_root, "google/cloud/batch/gapic_version.py")) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + dependencies = [ - "google-api-core[grpc] >= 1.33.2, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >=0.12.4, <1.0.0dev", + "grpc-google-iam-v1 >= 0.12.4, < 1.0.0dev", ] url = "https://github.com/googleapis/python-batch" @@ -47,7 +59,6 @@ if "google.cloud" in packages: namespaces.append("google.cloud") - setuptools.setup( name=name, version=version, diff --git a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt index e69de29..ad3f0fa 100644 --- a/testing/constraints-3.10.txt +++ b/testing/constraints-3.10.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt index e69de29..ad3f0fa 100644 --- a/testing/constraints-3.11.txt +++ b/testing/constraints-3.11.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index 883a87a..2beecf9 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -4,7 +4,7 @@ # Pin the version to the lower bound. # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.33.2 +google-api-core==1.34.0 proto-plus==1.22.0 protobuf==3.19.5 grpc-google-iam-v1==0.12.4 diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt index e69de29..ad3f0fa 100644 --- a/testing/constraints-3.8.txt +++ b/testing/constraints-3.8.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt index e69de29..ad3f0fa 100644 --- a/testing/constraints-3.9.txt +++ b/testing/constraints-3.9.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/tests/unit/gapic/batch_v1/test_batch_service.py b/tests/unit/gapic/batch_v1/test_batch_service.py index 8c59a3e..cd434c0 100644 --- a/tests/unit/gapic/batch_v1/test_batch_service.py +++ b/tests/unit/gapic/batch_v1/test_batch_service.py @@ -22,41 +22,25 @@ except ImportError: # pragma: NO COVER import mock -import grpc -from grpc.experimental import aio from collections.abc import Iterable -from google.protobuf import json_format import json import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) from google.api_core import client_options from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template +import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.cloud.batch_v1.services.batch_service import BatchServiceAsyncClient -from google.cloud.batch_v1.services.batch_service import BatchServiceClient -from google.cloud.batch_v1.services.batch_service import pagers -from google.cloud.batch_v1.services.batch_service import transports -from google.cloud.batch_v1.types import batch -from google.cloud.batch_v1.types import job -from google.cloud.batch_v1.types import job as gcb_job -from google.cloud.batch_v1.types import task -from google.cloud.batch_v1.types import volume from google.cloud.location import locations_pb2 from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore @@ -64,8 +48,27 @@ from google.longrunning import operations_pb2 from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore -import google.auth +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.batch_v1.services.batch_service import ( + BatchServiceAsyncClient, + BatchServiceClient, + pagers, + transports, +) +from google.cloud.batch_v1.types import batch +from google.cloud.batch_v1.types import job +from google.cloud.batch_v1.types import job as gcb_job +from google.cloud.batch_v1.types import task, volume def 
client_cert_source_callback(): @@ -2760,7 +2763,7 @@ def test_create_job_rest_required_fields(request_type=batch.CreateJobRequest): response = client.create_job(request) - expected_params = [] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params @@ -3181,7 +3184,7 @@ def test_get_job_rest_required_fields(request_type=batch.GetJobRequest): response = client.get_job(request) - expected_params = [] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params @@ -3856,7 +3859,7 @@ def test_get_task_rest_required_fields(request_type=batch.GetTaskRequest): response = client.get_task(request) - expected_params = [] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params @@ -4134,7 +4137,7 @@ def test_list_tasks_rest_required_fields(request_type=batch.ListTasksRequest): response = client.list_tasks(request) - expected_params = [] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params diff --git a/tests/unit/gapic/batch_v1alpha/test_batch_service.py b/tests/unit/gapic/batch_v1alpha/test_batch_service.py index 0ba1ce6..f0256a4 100644 --- a/tests/unit/gapic/batch_v1alpha/test_batch_service.py +++ b/tests/unit/gapic/batch_v1alpha/test_batch_service.py @@ -22,41 +22,25 @@ except ImportError: # pragma: NO COVER import mock -import grpc -from grpc.experimental import aio from collections.abc import Iterable -from google.protobuf import json_format import json import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) from google.api_core import client_options from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template +import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.cloud.batch_v1alpha.services.batch_service import BatchServiceAsyncClient -from google.cloud.batch_v1alpha.services.batch_service import BatchServiceClient -from google.cloud.batch_v1alpha.services.batch_service import pagers -from google.cloud.batch_v1alpha.services.batch_service import transports -from google.cloud.batch_v1alpha.types import batch -from google.cloud.batch_v1alpha.types import job -from google.cloud.batch_v1alpha.types import job as gcb_job -from google.cloud.batch_v1alpha.types import task -from google.cloud.batch_v1alpha.types import volume from google.cloud.location import locations_pb2 from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore @@ -64,8 +48,27 @@ from google.longrunning import operations_pb2 from google.oauth2 
import service_account from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore -import google.auth +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.batch_v1alpha.services.batch_service import ( + BatchServiceAsyncClient, + BatchServiceClient, + pagers, + transports, +) +from google.cloud.batch_v1alpha.types import batch +from google.cloud.batch_v1alpha.types import job +from google.cloud.batch_v1alpha.types import job as gcb_job +from google.cloud.batch_v1alpha.types import task, volume def client_cert_source_callback(): @@ -2640,6 +2643,7 @@ def test_create_job_rest(request_type): "device_name": "device_name_value", } ], + "reservation": "reservation_value", }, "instances": [ { @@ -2812,7 +2816,7 @@ def test_create_job_rest_required_fields(request_type=batch.CreateJobRequest): response = client.create_job(request) - expected_params = [] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params @@ -3015,6 +3019,7 @@ def test_create_job_rest_bad_request( "device_name": "device_name_value", } ], + "reservation": "reservation_value", }, "instances": [ { @@ -3271,7 +3276,7 @@ def test_get_job_rest_required_fields(request_type=batch.GetJobRequest): response = client.get_job(request) - expected_params = [] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params @@ -3946,7 +3951,7 @@ def test_get_task_rest_required_fields(request_type=batch.GetTaskRequest): response = client.get_task(request) - expected_params = [] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params @@ -4224,7 +4229,7 @@ def test_list_tasks_rest_required_fields(request_type=batch.ListTasksRequest): response = client.list_tasks(request) - expected_params = [] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params
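
The snippets deleted above (get_job, get_task, list_jobs, list_tasks, delete_job, read_job_logs) are thin wrappers over resource-name calls on `batch_v1.BatchServiceClient`. For readers who relied on the removed files, here is a minimal sketch of the equivalent inline usage; the project/region/job values are placeholders and the helper name is hypothetical, but every client call and resource-name format is taken directly from the deleted snippets.

```python
from google.cloud import batch_v1


def inspect_and_clean_up(project_id: str, region: str, job_name: str) -> None:
    """Placeholder helper mirroring the deleted get/list/delete snippets."""
    client = batch_v1.BatchServiceClient()
    parent = f"projects/{project_id}/locations/{region}"
    job_path = f"{parent}/jobs/{job_name}"

    # Equivalent of get_job and list_jobs.
    job = client.get_job(name=job_path)
    print(job.status.state)
    for listed in client.list_jobs(parent=parent):
        print(listed.uid)

    # Equivalent of list_tasks and get_task; "group0" is the default task
    # group name used throughout the deleted samples and tests.
    for task in client.list_tasks(parent=f"{job_path}/taskGroups/group0"):
        print(task.name)
    first_task = client.get_task(name=f"{job_path}/taskGroups/group0/tasks/0")
    print(first_task.status.state)

    # Equivalent of delete_job; the call returns a long-running operation,
    # which the deleted tests resolved with .result().
    client.delete_job(name=job_path).result()
```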
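The rewritten setup.py no longer hard-codes `version = "0.4.1"`; it execs `google/cloud/batch/gapic_version.py` and derives the development-status classifier from the major version. The same logic in isolation, as a sketch; the path and the `__version__` attribute are taken from the diff, nothing else is assumed.

```python
import os

# Resolve the repo root relative to setup.py.
package_root = os.path.abspath(os.path.dirname(__file__))

# Load __version__ from gapic_version.py without importing the package.
version = {}
with open(os.path.join(package_root, "google/cloud/batch/gapic_version.py")) as fp:
    exec(fp.read(), version)  # populates version["__version__"]
version = version["__version__"]

# 0.x releases stay Beta; 1.x and later are Production/Stable.
if version[0] == "0":
    release_status = "Development Status :: 4 - Beta"
else:
    release_status = "Development Status :: 5 - Production/Stable"

print(version, release_status)
```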
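The unit-test hunks for both batch_v1 and batch_v1alpha change `expected_params` from an empty list to the REST system parameter `$alt=json;enum-encoding=int`, i.e. every generated REST call is now expected to request JSON responses with integer enum encoding. A hypothetical helper showing the shape of that assertion, with the mock attribute access copied from the tests:

```python
def assert_rest_system_params(mocked_request) -> None:
    """Assert that a mocked REST request carried the generated system params."""
    expected_params = [("$alt", "json;enum-encoding=int")]
    actual_params = mocked_request.call_args.kwargs["params"]
    assert expected_params == actual_params
```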