From 0b096801953e6dbc55b5bf0a1ceaea56bf957578 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Wed, 23 Apr 2025 22:07:01 +0000 Subject: [PATCH 1/3] docs: update expected format of Job name field PiperOrigin-RevId: 750693042 Source-Link: https://github.com/googleapis/googleapis/commit/889c45795a8ec97eae3bfc9c903db698621c540c Source-Link: https://github.com/googleapis/googleapis-gen/commit/73a50f9452a56755efa71dfcb720ed7c43b69ecf Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLXN0b3JhZ2ViYXRjaG9wZXJhdGlvbnMvLk93bEJvdC55YW1sIiwiaCI6IjczYTUwZjk0NTJhNTY3NTVlZmE3MWRmY2I3MjBlZDdjNDNiNjllY2YifQ== --- .../v1/.coveragerc | 13 + .../v1/.flake8 | 34 + .../v1/LICENSE | 202 + .../v1/MANIFEST.in | 20 + .../v1/README.rst | 143 + .../v1/docs/_static/custom.css | 20 + .../v1/docs/_templates/layout.html | 50 + .../v1/docs/conf.py | 385 + .../v1/docs/index.rst | 10 + .../v1/docs/multiprocessing.rst | 7 + .../storagebatchoperations_v1/services_.rst | 6 + .../storage_batch_operations.rst | 10 + .../docs/storagebatchoperations_v1/types_.rst | 6 + .../cloud/storagebatchoperations/__init__.py | 67 + .../storagebatchoperations/gapic_version.py | 16 + .../cloud/storagebatchoperations/py.typed | 2 + .../storagebatchoperations_v1/__init__.py | 68 + .../gapic_metadata.json | 103 + .../gapic_version.py | 16 + .../cloud/storagebatchoperations_v1/py.typed | 2 + .../services/__init__.py | 15 + .../storage_batch_operations/__init__.py | 22 + .../storage_batch_operations/async_client.py | 1153 +++ .../storage_batch_operations/client.py | 1530 ++++ .../storage_batch_operations/pagers.py | 167 + .../transports/README.rst | 9 + .../transports/__init__.py | 38 + .../transports/base.py | 330 + .../transports/grpc.py | 584 ++ .../transports/grpc_asyncio.py | 682 ++ .../transports/rest.py | 1819 +++++ .../transports/rest_base.py | 455 ++ .../types/__init__.py | 62 + .../types/storage_batch_operations.py | 288 + .../types/storage_batch_operations_types.py | 653 ++ .../v1/mypy.ini | 3 + .../v1/noxfile.py | 591 ++ ...oogle.cloud.storagebatchoperations.v1.json | 830 +++ ...orage_batch_operations_cancel_job_async.py | 52 + ...torage_batch_operations_cancel_job_sync.py | 52 + ...orage_batch_operations_create_job_async.py | 63 + ...torage_batch_operations_create_job_sync.py | 63 + ...orage_batch_operations_delete_job_async.py | 50 + ...torage_batch_operations_delete_job_sync.py | 50 + ..._storage_batch_operations_get_job_async.py | 52 + ...d_storage_batch_operations_get_job_sync.py | 52 + ...torage_batch_operations_list_jobs_async.py | 53 + ...storage_batch_operations_list_jobs_sync.py | 53 + ...ixup_storagebatchoperations_v1_keywords.py | 180 + .../v1/setup.py | 98 + .../v1/testing/constraints-3.10.txt | 6 + .../v1/testing/constraints-3.11.txt | 6 + .../v1/testing/constraints-3.12.txt | 6 + .../v1/testing/constraints-3.13.txt | 11 + .../v1/testing/constraints-3.7.txt | 10 + .../v1/testing/constraints-3.8.txt | 6 + .../v1/testing/constraints-3.9.txt | 6 + .../v1/tests/__init__.py | 16 + .../v1/tests/unit/__init__.py | 16 + .../v1/tests/unit/gapic/__init__.py | 16 + .../storagebatchoperations_v1/__init__.py | 16 + .../test_storage_batch_operations.py | 6318 +++++++++++++++++ 62 files changed, 17662 insertions(+) create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/.coveragerc create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/.flake8 create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/LICENSE create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/MANIFEST.in 
create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/README.rst create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/_static/custom.css create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/_templates/layout.html create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/conf.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/index.rst create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/multiprocessing.rst create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/storagebatchoperations_v1/services_.rst create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/storagebatchoperations_v1/storage_batch_operations.rst create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/storagebatchoperations_v1/types_.rst create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations/__init__.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations/gapic_version.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations/py.typed create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/__init__.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/gapic_metadata.json create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/gapic_version.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/py.typed create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/__init__.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/__init__.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/async_client.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/client.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/pagers.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/README.rst create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/__init__.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/base.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/grpc.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/grpc_asyncio.py create mode 100644 
owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/rest.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/rest_base.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/types/__init__.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/types/storage_batch_operations.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/types/storage_batch_operations_types.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/mypy.ini create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/noxfile.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/snippet_metadata_google.cloud.storagebatchoperations.v1.json create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_cancel_job_async.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_cancel_job_sync.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_create_job_async.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_create_job_sync.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_delete_job_async.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_delete_job_sync.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_get_job_async.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_get_job_sync.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_list_jobs_async.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_list_jobs_sync.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/scripts/fixup_storagebatchoperations_v1_keywords.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/setup.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.13.txt create mode 100644 
owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/__init__.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/gapic/storagebatchoperations_v1/__init__.py create mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/gapic/storagebatchoperations_v1/test_storage_batch_operations.py diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/.coveragerc b/owl-bot-staging/google-cloud-storagebatchoperations/v1/.coveragerc new file mode 100644 index 000000000000..bf5f3b460141 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/storagebatchoperations/__init__.py + google/cloud/storagebatchoperations/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/.flake8 b/owl-bot-staging/google-cloud-storagebatchoperations/v1/.flake8 new file mode 100644 index 000000000000..90316de21489 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/.flake8 @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +[flake8] +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): +# Resolve flake8 lint issues +ignore = E203, E231, E266, E501, W503 +exclude = + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): + # Ensure that generated code passes flake8 lint + **/gapic/** + **/services/** + **/types/** + # Exclude Protobuf gencode + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/LICENSE b/owl-bot-staging/google-cloud-storagebatchoperations/v1/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/MANIFEST.in b/owl-bot-staging/google-cloud-storagebatchoperations/v1/MANIFEST.in
new file mode 100644
index 000000000000..dae249ec8976
--- /dev/null
+++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/MANIFEST.in
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+include README.rst LICENSE
+recursive-include google *.py *.pyi *.json *.proto py.typed
+recursive-include tests *
+global-exclude *.py[co]
+global-exclude __pycache__
diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/README.rst b/owl-bot-staging/google-cloud-storagebatchoperations/v1/README.rst
new file mode 100644
index 000000000000..d53120593783
--- /dev/null
+++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/README.rst
@@ -0,0 +1,143 @@
+Python Client for Google Cloud Storagebatchoperations API
+==========================================================
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. Enable the Google Cloud Storagebatchoperations API.
+4. `Setup Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install /path/to/library
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install \path\to\library
+
+
+Logging
+-------
+
+This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
+Note the following:
+
+#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging.
+#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
+#. By default, the logging events from this library are not handled.
+   You must **explicitly configure log handling** using one of the mechanisms below.
+
+
+Simple, environment-based configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
+logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
+messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging
+event.
+
+A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
+
+- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
+- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
+
+**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
+
+
+Examples
+^^^^^^^^
+
+- Enabling the default handler for all Google-based loggers
+
+.. code-block:: console
+
+   export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
+
+- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
+
+.. code-block:: console
+
+   export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
+
+
+Advanced, code-based configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+You can also configure a valid logging scope using Python's standard :code:`logging` mechanism.
+
+
+Examples
+^^^^^^^^
+
+- Configuring a handler for all Google-based loggers
+
+.. code-block:: python
+
+   import logging
+
+   from google.cloud import storagebatchoperations_v1
+
+   base_logger = logging.getLogger("google")
+   base_logger.addHandler(logging.StreamHandler())
+   base_logger.setLevel(logging.DEBUG)
+
+- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
+
+.. code-block:: python
+
+   import logging
+
+   from google.cloud import storagebatchoperations_v1
+
+   base_logger = logging.getLogger("google.cloud.library_v1")
+   base_logger.addHandler(logging.StreamHandler())
+   base_logger.setLevel(logging.DEBUG)
+
+
+Logging details
+~~~~~~~~~~~~~~~
+
+#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
+   logger from the :code:`google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
+   :code:`logging.getLogger("google").propagate = True` in your code.
+#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
+   one library, but decide you need to also set up environment-based logging configuration for another library.
+
+   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
+      if the code-based configuration gets applied first.
+
+#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
+   executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
+   (This is the reason for 2.i. above.)
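+
+
+Example Usage
+~~~~~~~~~~~~~
+
+The following is a minimal, illustrative sketch of the generated client in use, not an
+official sample: the project and location values are placeholders, and it assumes the
+API is enabled and application-default credentials are configured in your environment:
+
+.. code-block:: python
+
+   from google.cloud import storagebatchoperations_v1
+
+   # Instantiate a synchronous client. Credentials are resolved from the
+   # environment (application-default credentials) unless passed explicitly.
+   client = storagebatchoperations_v1.StorageBatchOperationsClient()
+
+   # List batch jobs under a project/location. The returned pager lazily
+   # fetches additional pages as you iterate over it.
+   parent = "projects/my-project/locations/global"  # placeholder resource path
+   for job in client.list_jobs(parent=parent):
+       print(job.name)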
diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/_static/custom.css
new file mode 100644
index 000000000000..b0a295464b23
--- /dev/null
+++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/_static/custom.css
@@ -0,0 +1,20 @@
+div#python2-eol {
+    border-color: red;
+    border-width: medium;
+}
+
+/* Ensure minimum width for 'Parameters' / 'Returns' column */
+dl.field-list > dt {
+    min-width: 100px
+}
+
+/* Insert space between methods for readability */
+dl.method {
+    padding-top: 10px;
+    padding-bottom: 10px
+}
+
+/* Insert empty space between classes */
+dl.class {
+    padding-bottom: 50px
+}
diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/_templates/layout.html b/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/_templates/layout.html
new file mode 100644
index 000000000000..95e9c77fcfe1
--- /dev/null
+++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/_templates/layout.html
@@ -0,0 +1,50 @@
+
+{% extends "!layout.html" %}
+{%- block content %}
+{%- if theme_fixed_sidebar|lower == 'true' %}
+  <div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+      {%- if render_sidebar %}
+        <div class="bodywrapper">
+      {%- endif %}
+
+        {%- block relbar_top %}
+          {%- if theme_show_relbar_top|tobool %}
+            <div class="related top">
+              &nbsp;
+              {{- rellink_markup () }}
+            </div>
+          {%- endif %}
+        {% endblock %}
+
+        <div class="body" role="main">
+          <div class="admonition" id="python2-eol">
+            As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+            Library versions released prior to that date will continue to be available. For more information please
+            visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+          </div>
+          {% block body %} {% endblock %}
+        </div>
+
+        {%- block relbar_bottom %}
+          {%- if theme_show_relbar_bottom|tobool %}
+            <div class="related bottom">
+              &nbsp;
+              {{- rellink_markup () }}
+            </div>
+          {%- endif %}
+        {% endblock %}
+
+      {%- if render_sidebar %}
+        </div>
+      {%- endif %}
+      </div>
+    {%- endblock %}
+    <div class="clearer"></div>
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/conf.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/conf.py new file mode 100644 index 000000000000..5ba6711f5cb5 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/conf.py @@ -0,0 +1,385 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-storagebatchoperations documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.5.0" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-storagebatchoperations" +copyright = u"2025, Google, LLC" +author = u"Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. 
+# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-storagebatchoperations", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-storagebatchoperations-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-storagebatchoperations.tex", + u"google-cloud-storagebatchoperations Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. 
+# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-storagebatchoperations", + "google-cloud-storagebatchoperations Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-storagebatchoperations", + "google-cloud-storagebatchoperations Documentation", + author, + "google-cloud-storagebatchoperations", + "google-cloud-storagebatchoperations Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/index.rst b/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/index.rst new file mode 100644 index 000000000000..fcca274c7f0b --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/index.rst @@ -0,0 +1,10 @@ +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + storagebatchoperations_v1/services_ + storagebatchoperations_v1/types_ diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/multiprocessing.rst b/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. 
In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/storagebatchoperations_v1/services_.rst b/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/storagebatchoperations_v1/services_.rst new file mode 100644 index 000000000000..7457657d5b20 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/storagebatchoperations_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Storagebatchoperations v1 API +======================================================= +.. toctree:: + :maxdepth: 2 + + storage_batch_operations diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/storagebatchoperations_v1/storage_batch_operations.rst b/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/storagebatchoperations_v1/storage_batch_operations.rst new file mode 100644 index 000000000000..ae1ef85a3689 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/storagebatchoperations_v1/storage_batch_operations.rst @@ -0,0 +1,10 @@ +StorageBatchOperations +---------------------------------------- + +.. automodule:: google.cloud.storagebatchoperations_v1.services.storage_batch_operations + :members: + :inherited-members: + +.. automodule:: google.cloud.storagebatchoperations_v1.services.storage_batch_operations.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/storagebatchoperations_v1/types_.rst b/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/storagebatchoperations_v1/types_.rst new file mode 100644 index 000000000000..2c6e1fefedd6 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/storagebatchoperations_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Storagebatchoperations v1 API +==================================================== + +.. automodule:: google.cloud.storagebatchoperations_v1.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations/__init__.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations/__init__.py new file mode 100644 index 000000000000..62cc717939c9 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations/__init__.py @@ -0,0 +1,67 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.storagebatchoperations import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.storagebatchoperations_v1.services.storage_batch_operations.client import StorageBatchOperationsClient +from google.cloud.storagebatchoperations_v1.services.storage_batch_operations.async_client import StorageBatchOperationsAsyncClient + +from google.cloud.storagebatchoperations_v1.types.storage_batch_operations import CancelJobRequest +from google.cloud.storagebatchoperations_v1.types.storage_batch_operations import CancelJobResponse +from google.cloud.storagebatchoperations_v1.types.storage_batch_operations import CreateJobRequest +from google.cloud.storagebatchoperations_v1.types.storage_batch_operations import DeleteJobRequest +from google.cloud.storagebatchoperations_v1.types.storage_batch_operations import GetJobRequest +from google.cloud.storagebatchoperations_v1.types.storage_batch_operations import ListJobsRequest +from google.cloud.storagebatchoperations_v1.types.storage_batch_operations import ListJobsResponse +from google.cloud.storagebatchoperations_v1.types.storage_batch_operations import OperationMetadata +from google.cloud.storagebatchoperations_v1.types.storage_batch_operations_types import BucketList +from google.cloud.storagebatchoperations_v1.types.storage_batch_operations_types import Counters +from google.cloud.storagebatchoperations_v1.types.storage_batch_operations_types import DeleteObject +from google.cloud.storagebatchoperations_v1.types.storage_batch_operations_types import ErrorLogEntry +from google.cloud.storagebatchoperations_v1.types.storage_batch_operations_types import ErrorSummary +from google.cloud.storagebatchoperations_v1.types.storage_batch_operations_types import Job +from google.cloud.storagebatchoperations_v1.types.storage_batch_operations_types import LoggingConfig +from google.cloud.storagebatchoperations_v1.types.storage_batch_operations_types import Manifest +from google.cloud.storagebatchoperations_v1.types.storage_batch_operations_types import PrefixList +from google.cloud.storagebatchoperations_v1.types.storage_batch_operations_types import PutMetadata +from google.cloud.storagebatchoperations_v1.types.storage_batch_operations_types import PutObjectHold +from google.cloud.storagebatchoperations_v1.types.storage_batch_operations_types import RewriteObject + +__all__ = ('StorageBatchOperationsClient', + 'StorageBatchOperationsAsyncClient', + 'CancelJobRequest', + 'CancelJobResponse', + 'CreateJobRequest', + 'DeleteJobRequest', + 'GetJobRequest', + 'ListJobsRequest', + 'ListJobsResponse', + 'OperationMetadata', + 'BucketList', + 'Counters', + 'DeleteObject', + 'ErrorLogEntry', + 'ErrorSummary', + 'Job', + 'LoggingConfig', + 'Manifest', + 'PrefixList', + 'PutMetadata', + 'PutObjectHold', + 'RewriteObject', +) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations/gapic_version.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations/gapic_version.py new file mode 100644 index 000000000000..20a9cd975b02 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations/py.typed b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations/py.typed new file mode 100644 index 000000000000..bc2c99219089 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-storagebatchoperations package uses inline types. diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/__init__.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/__init__.py new file mode 100644 index 000000000000..373fd0e17e75 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/__init__.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.storagebatchoperations_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.storage_batch_operations import StorageBatchOperationsClient +from .services.storage_batch_operations import StorageBatchOperationsAsyncClient + +from .types.storage_batch_operations import CancelJobRequest +from .types.storage_batch_operations import CancelJobResponse +from .types.storage_batch_operations import CreateJobRequest +from .types.storage_batch_operations import DeleteJobRequest +from .types.storage_batch_operations import GetJobRequest +from .types.storage_batch_operations import ListJobsRequest +from .types.storage_batch_operations import ListJobsResponse +from .types.storage_batch_operations import OperationMetadata +from .types.storage_batch_operations_types import BucketList +from .types.storage_batch_operations_types import Counters +from .types.storage_batch_operations_types import DeleteObject +from .types.storage_batch_operations_types import ErrorLogEntry +from .types.storage_batch_operations_types import ErrorSummary +from .types.storage_batch_operations_types import Job +from .types.storage_batch_operations_types import LoggingConfig +from .types.storage_batch_operations_types import Manifest +from .types.storage_batch_operations_types import PrefixList +from .types.storage_batch_operations_types import PutMetadata +from .types.storage_batch_operations_types import PutObjectHold +from .types.storage_batch_operations_types import RewriteObject + +__all__ = ( + 'StorageBatchOperationsAsyncClient', +'BucketList', +'CancelJobRequest', +'CancelJobResponse', +'Counters', +'CreateJobRequest', +'DeleteJobRequest', +'DeleteObject', +'ErrorLogEntry', +'ErrorSummary', +'GetJobRequest', +'Job', +'ListJobsRequest', +'ListJobsResponse', +'LoggingConfig', +'Manifest', +'OperationMetadata', +'PrefixList', +'PutMetadata', +'PutObjectHold', +'RewriteObject', +'StorageBatchOperationsClient', +) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/gapic_metadata.json new file mode 100644 index 000000000000..c5bc99c44670 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/gapic_metadata.json @@ -0,0 +1,103 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.storagebatchoperations_v1", + "protoPackage": "google.cloud.storagebatchoperations.v1", + "schema": "1.0", + "services": { + "StorageBatchOperations": { + "clients": { + "grpc": { + "libraryClient": "StorageBatchOperationsClient", + "rpcs": { + "CancelJob": { + "methods": [ + "cancel_job" + ] + }, + "CreateJob": { + "methods": [ + "create_job" + ] + }, + "DeleteJob": { + "methods": [ + "delete_job" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + } + } + }, + "grpc-async": { + "libraryClient": "StorageBatchOperationsAsyncClient", + "rpcs": { + "CancelJob": { + "methods": [ + "cancel_job" + ] + }, + "CreateJob": { + "methods": [ + "create_job" + ] + }, + "DeleteJob": { + "methods": [ + "delete_job" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + } + } + }, + "rest": { + "libraryClient": "StorageBatchOperationsClient", + "rpcs": { 
+ "CancelJob": { + "methods": [ + "cancel_job" + ] + }, + "CreateJob": { + "methods": [ + "create_job" + ] + }, + "DeleteJob": { + "methods": [ + "delete_job" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/gapic_version.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/gapic_version.py new file mode 100644 index 000000000000..20a9cd975b02 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/py.typed b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/py.typed new file mode 100644 index 000000000000..bc2c99219089 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-storagebatchoperations package uses inline types. diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/__init__.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/__init__.py new file mode 100644 index 000000000000..cbf94b283c70 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/__init__.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/__init__.py new file mode 100644 index 000000000000..d13eaefe23de --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import StorageBatchOperationsClient +from .async_client import StorageBatchOperationsAsyncClient + +__all__ = ( + 'StorageBatchOperationsClient', + 'StorageBatchOperationsAsyncClient', +) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/async_client.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/async_client.py new file mode 100644 index 000000000000..70599dae075e --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/async_client.py @@ -0,0 +1,1153 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import logging as std_logging +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.storagebatchoperations_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.storagebatchoperations_v1.services.storage_batch_operations import pagers +from google.cloud.storagebatchoperations_v1.types import storage_batch_operations +from google.cloud.storagebatchoperations_v1.types import storage_batch_operations_types +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import StorageBatchOperationsTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import StorageBatchOperationsGrpcAsyncIOTransport +from .client import StorageBatchOperationsClient + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +class StorageBatchOperationsAsyncClient: + """Storage Batch Operations offers a managed experience to + perform batch operations on millions of Cloud Storage objects in + a serverless fashion. With this service, you can automate and + simplify large scale API operations performed on Cloud Storage + objects. + """ + + _client: StorageBatchOperationsClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+    DEFAULT_ENDPOINT = StorageBatchOperationsClient.DEFAULT_ENDPOINT
+    DEFAULT_MTLS_ENDPOINT = StorageBatchOperationsClient.DEFAULT_MTLS_ENDPOINT
+    _DEFAULT_ENDPOINT_TEMPLATE = StorageBatchOperationsClient._DEFAULT_ENDPOINT_TEMPLATE
+    _DEFAULT_UNIVERSE = StorageBatchOperationsClient._DEFAULT_UNIVERSE
+
+    crypto_key_path = staticmethod(StorageBatchOperationsClient.crypto_key_path)
+    parse_crypto_key_path = staticmethod(StorageBatchOperationsClient.parse_crypto_key_path)
+    job_path = staticmethod(StorageBatchOperationsClient.job_path)
+    parse_job_path = staticmethod(StorageBatchOperationsClient.parse_job_path)
+    common_billing_account_path = staticmethod(StorageBatchOperationsClient.common_billing_account_path)
+    parse_common_billing_account_path = staticmethod(StorageBatchOperationsClient.parse_common_billing_account_path)
+    common_folder_path = staticmethod(StorageBatchOperationsClient.common_folder_path)
+    parse_common_folder_path = staticmethod(StorageBatchOperationsClient.parse_common_folder_path)
+    common_organization_path = staticmethod(StorageBatchOperationsClient.common_organization_path)
+    parse_common_organization_path = staticmethod(StorageBatchOperationsClient.parse_common_organization_path)
+    common_project_path = staticmethod(StorageBatchOperationsClient.common_project_path)
+    parse_common_project_path = staticmethod(StorageBatchOperationsClient.parse_common_project_path)
+    common_location_path = staticmethod(StorageBatchOperationsClient.common_location_path)
+    parse_common_location_path = staticmethod(StorageBatchOperationsClient.parse_common_location_path)
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            StorageBatchOperationsAsyncClient: The constructed client.
+        """
+        return StorageBatchOperationsClient.from_service_account_info.__func__(StorageBatchOperationsAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            StorageBatchOperationsAsyncClient: The constructed client.
+        """
+        return StorageBatchOperationsClient.from_service_account_file.__func__(StorageBatchOperationsAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return StorageBatchOperationsClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> StorageBatchOperationsTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            StorageBatchOperationsTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        return self._client._api_endpoint
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Returns:
+            str: The universe domain used
+                by the client instance.
+        """
+        return self._client._universe_domain
+
+    get_transport_class = StorageBatchOperationsClient.get_transport_class
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, StorageBatchOperationsTransport, Callable[..., StorageBatchOperationsTransport]]] = "grpc_asyncio",
+            client_options: Optional[ClientOptions] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the storage batch operations async client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,StorageBatchOperationsTransport,Callable[..., StorageBatchOperationsTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport to use.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the StorageBatchOperationsTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client = StorageBatchOperationsClient(
+            credentials=credentials,
+            transport=transport,
+            client_options=client_options,
+            client_info=client_info,
+
+        )
+
+        if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG):  # pragma: NO COVER
+            _LOGGER.debug(
+                "Created client `google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient`.",
+                extra = {
+                    "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations",
+                    "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""),
+                    "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}",
+                    "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(),
+                } if hasattr(self._client._transport, "_credentials") else {
+                    "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations",
+                    "credentialsType": None,
+                }
+            )
+
+    async def list_jobs(self,
+            request: Optional[Union[storage_batch_operations.ListJobsRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> pagers.ListJobsAsyncPager:
+        r"""Lists Jobs in a given project and location.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import storagebatchoperations_v1
+
+            async def sample_list_jobs():
+                # Create a client
+                client = storagebatchoperations_v1.StorageBatchOperationsAsyncClient()
+
+                # Initialize request argument(s)
+                request = storagebatchoperations_v1.ListJobsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.list_jobs(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.storagebatchoperations_v1.types.ListJobsRequest, dict]]):
+                The request object. Message for request to list Jobs
+            parent (:class:`str`):
+                Required. Format:
+                projects/{project_id}/locations/{location_id} .
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.storagebatchoperations_v1.services.storage_batch_operations.pagers.ListJobsAsyncPager: + Message for response to listing Jobs + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, storage_batch_operations.ListJobsRequest): + request = storage_batch_operations.ListJobsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListJobsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_job(self, + request: Optional[Union[storage_batch_operations.GetJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> storage_batch_operations_types.Job: + r"""Gets a batch job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import storagebatchoperations_v1 + + async def sample_get_job(): + # Create a client + client = storagebatchoperations_v1.StorageBatchOperationsAsyncClient() + + # Initialize request argument(s) + request = storagebatchoperations_v1.GetJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.storagebatchoperations_v1.types.GetJobRequest, dict]]): + The request object. Message for getting a Job + name (:class:`str`): + Required. ``name`` of the job to retrieve. Format: + projects/{project_id}/locations/{location_id}/jobs/{job_id} + . + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.storagebatchoperations_v1.types.Job: + The Storage Batch Operations Job + description. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, storage_batch_operations.GetJobRequest): + request = storage_batch_operations.GetJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_job(self, + request: Optional[Union[storage_batch_operations.CreateJobRequest, dict]] = None, + *, + parent: Optional[str] = None, + job: Optional[storage_batch_operations_types.Job] = None, + job_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a batch job. 
+ + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import storagebatchoperations_v1 + + async def sample_create_job(): + # Create a client + client = storagebatchoperations_v1.StorageBatchOperationsAsyncClient() + + # Initialize request argument(s) + job = storagebatchoperations_v1.Job() + job.bucket_list.buckets.bucket = "bucket_value" + job.put_object_hold.temporary_hold = "UNSET" + job.put_object_hold.event_based_hold = "UNSET" + + request = storagebatchoperations_v1.CreateJobRequest( + parent="parent_value", + job_id="job_id_value", + job=job, + ) + + # Make the request + operation = client.create_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.storagebatchoperations_v1.types.CreateJobRequest, dict]]): + The request object. Message for creating a Job + parent (:class:`str`): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job (:class:`google.cloud.storagebatchoperations_v1.types.Job`): + Required. The resource being created + This corresponds to the ``job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_id (:class:`str`): + Required. The optional ``job_id`` for this Job . If not + specified, an id is generated. ``job_id`` should be no + more than 128 characters and must include only + characters available in DNS names, as defined by + RFC-1123. + + This corresponds to the ``job_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.storagebatchoperations_v1.types.Job` + The Storage Batch Operations Job description. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, job, job_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
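+        # For illustration (placeholder values, not a real resource): both
+        #   await client.create_job(request={"parent": "...", "job_id": "...", "job": job})
+        # and the flattened form
+        #   await client.create_job(parent="...", job_id="...", job=job)
+        # are accepted; supplying `request` together with flattened fields
+        # raises the ValueError above.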
+ if not isinstance(request, storage_batch_operations.CreateJobRequest): + request = storage_batch_operations.CreateJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if job is not None: + request.job = job + if job_id is not None: + request.job_id = job_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + storage_batch_operations_types.Job, + metadata_type=storage_batch_operations.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_job(self, + request: Optional[Union[storage_batch_operations.DeleteJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a batch job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import storagebatchoperations_v1 + + async def sample_delete_job(): + # Create a client + client = storagebatchoperations_v1.StorageBatchOperationsAsyncClient() + + # Initialize request argument(s) + request = storagebatchoperations_v1.DeleteJobRequest( + name="name_value", + ) + + # Make the request + await client.delete_job(request=request) + + Args: + request (Optional[Union[google.cloud.storagebatchoperations_v1.types.DeleteJobRequest, dict]]): + The request object. Message for deleting a Job + name (:class:`str`): + Required. The ``name`` of the job to delete. Format: + projects/{project_id}/locations/{location_id}/jobs/{job_id} + . + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
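+        # `flattened_params` gathers the keyword-level fields (here only `name`);
+        # the check below rejects calls that mix them with an explicit `request`.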
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, storage_batch_operations.DeleteJobRequest): + request = storage_batch_operations.DeleteJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_job(self, + request: Optional[Union[storage_batch_operations.CancelJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> storage_batch_operations.CancelJobResponse: + r"""Cancels a batch job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import storagebatchoperations_v1 + + async def sample_cancel_job(): + # Create a client + client = storagebatchoperations_v1.StorageBatchOperationsAsyncClient() + + # Initialize request argument(s) + request = storagebatchoperations_v1.CancelJobRequest( + name="name_value", + ) + + # Make the request + response = await client.cancel_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.storagebatchoperations_v1.types.CancelJobRequest, dict]]): + The request object. Message for Job to Cancel + name (:class:`str`): + Required. The ``name`` of the job to cancel. Format: + projects/{project_id}/locations/{location_id}/jobs/{job_id}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.storagebatchoperations_v1.types.CancelJobResponse: + Message for response to cancel Job. + """ + # Create or coerce a protobuf request object. 
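+        # (A plain dict is also accepted here; it is converted into a
+        # CancelJobRequest by the isinstance check below.)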
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, storage_batch_operations.CancelJobRequest): + request = storage_batch_operations.CancelJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.cancel_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. 
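+        # This emits the standard `x-goog-request-params` routing header, e.g.
+        # ("x-goog-request-params", "name=..."), so the backend can route the
+        # call without parsing the request body.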
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
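+        # `_wrapped_methods` maps each bare transport method to a callable that
+        # already carries this RPC's default retry and timeout configuration.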
+ rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def __aenter__(self) -> "StorageBatchOperationsAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "StorageBatchOperationsAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/client.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/client.py new file mode 100644 index 000000000000..c5b92561bc74 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/client.py @@ -0,0 +1,1530 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.storagebatchoperations_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.storagebatchoperations_v1.services.storage_batch_operations import pagers +from google.cloud.storagebatchoperations_v1.types import storage_batch_operations +from google.cloud.storagebatchoperations_v1.types import storage_batch_operations_types +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import StorageBatchOperationsTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import StorageBatchOperationsGrpcTransport +from .transports.grpc_asyncio import StorageBatchOperationsGrpcAsyncIOTransport +from .transports.rest import StorageBatchOperationsRestTransport + + +class StorageBatchOperationsClientMeta(type): + """Metaclass for the StorageBatchOperations client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[StorageBatchOperationsTransport]] + _transport_registry["grpc"] = StorageBatchOperationsGrpcTransport + _transport_registry["grpc_asyncio"] = StorageBatchOperationsGrpcAsyncIOTransport + _transport_registry["rest"] = StorageBatchOperationsRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[StorageBatchOperationsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. 
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class StorageBatchOperationsClient(metaclass=StorageBatchOperationsClientMeta):
+    """Storage Batch Operations offers a managed experience to
+    perform batch operations on millions of Cloud Storage objects in
+    a serverless fashion. With this service, you can automate and
+    simplify large scale API operations performed on Cloud Storage
+    objects.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "storagebatchoperations.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "storagebatchoperations.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            StorageBatchOperationsClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            StorageBatchOperationsClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(
+            filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> StorageBatchOperationsTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            StorageBatchOperationsTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def crypto_key_path(project: str,location: str,key_ring: str,crypto_key: str,) -> str:
+        """Returns a fully-qualified crypto_key string."""
+        return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, )
+
+    @staticmethod
+    def parse_crypto_key_path(path: str) -> Dict[str,str]:
+        """Parses a crypto_key path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/keyRings/(?P<key_ring>.+?)/cryptoKeys/(?P<crypto_key>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def job_path(project: str,location: str,job: str,) -> str:
+        """Returns a fully-qualified job string."""
+        return "projects/{project}/locations/{location}/jobs/{job}".format(project=project, location=location, job=job, )
+
+    @staticmethod
+    def parse_job_path(path: str) -> Dict[str,str]:
+        """Parses a job path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/jobs/(?P<job>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning)
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    @staticmethod
+    def _read_environment_variables():
+        """Returns the environment variables used by the client.
+
+        Returns:
+            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
+            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
+
+        Raises:
+            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
+                any of ["true", "false"].
+            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
+                is not any of ["auto", "never", "always"].
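+
+        Example (an illustrative sketch only):
+
+        .. code-block:: python
+
+            import os
+
+            os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"
+            use_cert, use_mtls, universe = (
+                StorageBatchOperationsClient._read_environment_variables())
+            # use_cert is False, use_mtls is "never", and universe is None
+            # unless GOOGLE_CLOUD_UNIVERSE_DOMAIN is set.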
+ """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = StorageBatchOperationsClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = StorageBatchOperationsClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = StorageBatchOperationsClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = StorageBatchOperationsClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, + error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, StorageBatchOperationsTransport, Callable[..., StorageBatchOperationsTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the storage batch operations client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,StorageBatchOperationsTransport,Callable[..., StorageBatchOperationsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the StorageBatchOperationsTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that the ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client_options = client_options
+        if isinstance(self._client_options, dict):
+            self._client_options = client_options_lib.from_dict(self._client_options)
+        if self._client_options is None:
+            self._client_options = client_options_lib.ClientOptions()
+        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
+
+        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
+
+        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = StorageBatchOperationsClient._read_environment_variables()
+        self._client_cert_source = StorageBatchOperationsClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
+        self._universe_domain = StorageBatchOperationsClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
+        self._api_endpoint = None  # updated below, depending on `transport`
+
+        # Initialize the universe domain validation.
+        self._is_universe_domain_valid = False
+
+        if CLIENT_LOGGING_SUPPORTED:  # pragma: NO COVER
+            # Setup logging.
+            client_logging.initialize_logging()
+
+        api_key_value = getattr(self._client_options, "api_key", None)
+        if api_key_value and credentials:
+            raise ValueError("client_options.api_key and credentials are mutually exclusive")
+
+        # Save or instantiate the transport.
+        # Ordinarily, we provide the transport, but allowing a custom transport
+        # instance provides an extensibility point for unusual situations.
+        transport_provided = isinstance(transport, StorageBatchOperationsTransport)
+        if transport_provided:
+            # transport is a StorageBatchOperationsTransport instance.
+            if credentials or self._client_options.credentials_file or api_key_value:
+                raise ValueError("When providing a transport instance, "
+                                 "provide its credentials directly.")
+            if self._client_options.scopes:
+                raise ValueError(
+                    "When providing a transport instance, provide its scopes "
+                    "directly."
+ ) + self._transport = cast(StorageBatchOperationsTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + StorageBatchOperationsClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[StorageBatchOperationsTransport], Callable[..., StorageBatchOperationsTransport]] = ( + StorageBatchOperationsClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., StorageBatchOperationsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient`.", + extra = { + "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "credentialsType": None, + } + ) + + def list_jobs(self, + request: Optional[Union[storage_batch_operations.ListJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListJobsPager: + r"""Lists Jobs in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import storagebatchoperations_v1 + + def sample_list_jobs(): + # Create a client + client = storagebatchoperations_v1.StorageBatchOperationsClient() + + # Initialize request argument(s) + request = storagebatchoperations_v1.ListJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.storagebatchoperations_v1.types.ListJobsRequest, dict]): + The request object. 
Message for request to list Jobs + parent (str): + Required. Format: + projects/{project_id}/locations/{location_id} . + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.storagebatchoperations_v1.services.storage_batch_operations.pagers.ListJobsPager: + Message for response to listing Jobs + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, storage_batch_operations.ListJobsRequest): + request = storage_batch_operations.ListJobsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListJobsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_job(self, + request: Optional[Union[storage_batch_operations.GetJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> storage_batch_operations_types.Job: + r"""Gets a batch job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import storagebatchoperations_v1
+
+            def sample_get_job():
+                # Create a client
+                client = storagebatchoperations_v1.StorageBatchOperationsClient()
+
+                # Initialize request argument(s)
+                request = storagebatchoperations_v1.GetJobRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.get_job(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.storagebatchoperations_v1.types.GetJobRequest, dict]):
+                The request object. Message for getting a Job
+            name (str):
+                Required. ``name`` of the job to retrieve. Format:
+                projects/{project_id}/locations/{location_id}/jobs/{job_id}.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.cloud.storagebatchoperations_v1.types.Job:
+                The Storage Batch Operations Job
+                description.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [name]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, storage_batch_operations.GetJobRequest):
+            request = storage_batch_operations.GetJobRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_job]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def create_job(self,
+            request: Optional[Union[storage_batch_operations.CreateJobRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            job: Optional[storage_batch_operations_types.Job] = None,
+            job_id: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> operation.Operation:
+        r"""Creates a batch job.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import storagebatchoperations_v1
+
+            def sample_create_job():
+                # Create a client
+                client = storagebatchoperations_v1.StorageBatchOperationsClient()
+
+                # Initialize request argument(s)
+                job = storagebatchoperations_v1.Job()
+                job.bucket_list.buckets.bucket = "bucket_value"
+                job.put_object_hold.temporary_hold = "UNSET"
+                job.put_object_hold.event_based_hold = "UNSET"
+
+                request = storagebatchoperations_v1.CreateJobRequest(
+                    parent="parent_value",
+                    job_id="job_id_value",
+                    job=job,
+                )
+
+                # Make the request
+                operation = client.create_job(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.storagebatchoperations_v1.types.CreateJobRequest, dict]):
+                The request object. Message for creating a Job
+            parent (str):
+                Required. Value for parent.
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            job (google.cloud.storagebatchoperations_v1.types.Job):
+                Required. The resource being created
+                This corresponds to the ``job`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            job_id (str):
+                Required. The ``job_id`` for this Job. If not
+                specified, an id is generated. ``job_id`` should be no
+                more than 128 characters and must include only
+                characters available in DNS names, as defined by
+                RFC-1123.
+
+                This corresponds to the ``job_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be
+                :class:`google.cloud.storagebatchoperations_v1.types.Job`
+                The Storage Batch Operations Job description.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [parent, job, job_id]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
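+        # (Illustrative note: a plain dict such as
+        # {"parent": "projects/my-project/locations/us-central1"} is also
+        # accepted here; proto-plus coerces it into a CreateJobRequest.)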
+ if not isinstance(request, storage_batch_operations.CreateJobRequest): + request = storage_batch_operations.CreateJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if job is not None: + request.job = job + if job_id is not None: + request.job_id = job_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + storage_batch_operations_types.Job, + metadata_type=storage_batch_operations.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_job(self, + request: Optional[Union[storage_batch_operations.DeleteJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a batch job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import storagebatchoperations_v1 + + def sample_delete_job(): + # Create a client + client = storagebatchoperations_v1.StorageBatchOperationsClient() + + # Initialize request argument(s) + request = storagebatchoperations_v1.DeleteJobRequest( + name="name_value", + ) + + # Make the request + client.delete_job(request=request) + + Args: + request (Union[google.cloud.storagebatchoperations_v1.types.DeleteJobRequest, dict]): + The request object. Message for deleting a Job + name (str): + Required. The ``name`` of the job to delete. Format: + projects/{project_id}/locations/{location_id}/jobs/{job_id} + . + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, storage_batch_operations.DeleteJobRequest): + request = storage_batch_operations.DeleteJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_job(self, + request: Optional[Union[storage_batch_operations.CancelJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> storage_batch_operations.CancelJobResponse: + r"""Cancels a batch job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import storagebatchoperations_v1 + + def sample_cancel_job(): + # Create a client + client = storagebatchoperations_v1.StorageBatchOperationsClient() + + # Initialize request argument(s) + request = storagebatchoperations_v1.CancelJobRequest( + name="name_value", + ) + + # Make the request + response = client.cancel_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.storagebatchoperations_v1.types.CancelJobRequest, dict]): + The request object. Message for Job to Cancel + name (str): + Required. The ``name`` of the job to cancel. Format: + projects/{project_id}/locations/{location_id}/jobs/{job_id}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.storagebatchoperations_v1.types.CancelJobResponse: + Message for response to cancel Job. + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, storage_batch_operations.CancelJobRequest): + request = storage_batch_operations.CancelJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "StorageBatchOperationsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "StorageBatchOperationsClient", +) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/pagers.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/pagers.py new file mode 100644 index 000000000000..3dea7c30fb00 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/pagers.py @@ -0,0 +1,167 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.storagebatchoperations_v1.types import storage_batch_operations +from google.cloud.storagebatchoperations_v1.types import storage_batch_operations_types + + +class ListJobsPager: + """A pager for iterating through ``list_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.storagebatchoperations_v1.types.ListJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.storagebatchoperations_v1.types.ListJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., storage_batch_operations.ListJobsResponse], + request: storage_batch_operations.ListJobsRequest, + response: storage_batch_operations.ListJobsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.storagebatchoperations_v1.types.ListJobsRequest): + The initial request object. 
+ response (google.cloud.storagebatchoperations_v1.types.ListJobsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = storage_batch_operations.ListJobsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[storage_batch_operations.ListJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[storage_batch_operations_types.Job]: + for page in self.pages: + yield from page.jobs + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListJobsAsyncPager: + """A pager for iterating through ``list_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.storagebatchoperations_v1.types.ListJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.storagebatchoperations_v1.types.ListJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[storage_batch_operations.ListJobsResponse]], + request: storage_batch_operations.ListJobsRequest, + response: storage_batch_operations.ListJobsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.storagebatchoperations_v1.types.ListJobsRequest): + The initial request object. + response (google.cloud.storagebatchoperations_v1.types.ListJobsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
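+
+        Example (an illustrative sketch only; assumes ``client`` is a
+        StorageBatchOperationsAsyncClient with valid credentials):
+
+        .. code-block:: python
+
+            pager = await client.list_jobs(
+                request={"parent": "projects/my-project/locations/us-central1"})
+            async for job in pager:  # additional pages are fetched lazily
+                print(job.name)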
+ """ + self._method = method + self._request = storage_batch_operations.ListJobsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[storage_batch_operations.ListJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[storage_batch_operations_types.Job]: + async def async_generator(): + async for page in self.pages: + for response in page.jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/README.rst b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/README.rst new file mode 100644 index 000000000000..2cc2d87b2b31 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`StorageBatchOperationsTransport` is the ABC for all transports. +- public child `StorageBatchOperationsGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `StorageBatchOperationsGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseStorageBatchOperationsRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `StorageBatchOperationsRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/__init__.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/__init__.py new file mode 100644 index 000000000000..f4838e6e09e6 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import StorageBatchOperationsTransport +from .grpc import StorageBatchOperationsGrpcTransport +from .grpc_asyncio import StorageBatchOperationsGrpcAsyncIOTransport +from .rest import StorageBatchOperationsRestTransport +from .rest import StorageBatchOperationsRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[StorageBatchOperationsTransport]] +_transport_registry['grpc'] = StorageBatchOperationsGrpcTransport +_transport_registry['grpc_asyncio'] = StorageBatchOperationsGrpcAsyncIOTransport +_transport_registry['rest'] = StorageBatchOperationsRestTransport + +__all__ = ( + 'StorageBatchOperationsTransport', + 'StorageBatchOperationsGrpcTransport', + 'StorageBatchOperationsGrpcAsyncIOTransport', + 'StorageBatchOperationsRestTransport', + 'StorageBatchOperationsRestInterceptor', +) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/base.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/base.py new file mode 100644 index 000000000000..cfdcc1ca5dd4 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/base.py @@ -0,0 +1,330 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
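For context on the registry just defined: the generated client resolves its `transport` argument ("grpc", "grpc_asyncio", or "rest") through this mapping. An illustrative sketch, assuming the package is installed; it reaches into the private `_transport_registry` purely for demonstration:

    from google.cloud.storagebatchoperations_v1.services.storage_batch_operations import (
        transports,
    )

    # Resolve a transport name to its class, much as the client does internally.
    transport_cls = transports._transport_registry["rest"]
    print(transport_cls.__name__)  # StorageBatchOperationsRestTransport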
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.storagebatchoperations_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.storagebatchoperations_v1.types import storage_batch_operations +from google.cloud.storagebatchoperations_v1.types import storage_batch_operations_types +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class StorageBatchOperationsTransport(abc.ABC): + """Abstract transport class for StorageBatchOperations.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'storagebatchoperations.googleapis.com' + + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'storagebatchoperations.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file,
+                **scopes_kwargs,
+                quota_project_id=quota_project_id
+            )
+        elif credentials is None and not self._ignore_credentials:
+            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
+            # Don't apply the audience when the user passed in their own credentials.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
+
+        # If the credentials are service account credentials, then always try to use self-signed JWT.
+        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ':' not in host:
+            host += ':443'
+        self._host = host
+
+    @property
+    def host(self):
+        return self._host
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
+        self._wrapped_methods = {
+            self.list_jobs: gapic_v1.method.wrap_method(
+                self.list_jobs,
+                default_retry=retries.Retry(
+                    initial=1.0,
+                    maximum=60.0,
+                    multiplier=2,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.get_job: gapic_v1.method.wrap_method(
+                self.get_job,
+                default_retry=retries.Retry(
+                    initial=1.0,
+                    maximum=60.0,
+                    multiplier=2,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.create_job: gapic_v1.method.wrap_method(
+                self.create_job,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.delete_job: gapic_v1.method.wrap_method(
+                self.delete_job,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.cancel_job: gapic_v1.method.wrap_method(
+                self.cancel_job,
+                default_retry=retries.Retry(
+                    initial=1.0,
+                    maximum=60.0,
+                    multiplier=2,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.get_location: gapic_v1.method.wrap_method(
+                self.get_location,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.list_locations: gapic_v1.method.wrap_method(
+                self.list_locations,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.cancel_operation: gapic_v1.method.wrap_method(
+                self.cancel_operation,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.delete_operation: gapic_v1.method.wrap_method(
+                self.delete_operation,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.get_operation: gapic_v1.method.wrap_method(
+                self.get_operation,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.list_operations: gapic_v1.method.wrap_method(
+                self.list_operations,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+        }
+
+    def close(self):
+        """Closes resources associated with the transport.
+
+        .. warning::
+             Only call this method if the transport is NOT shared
+             with other clients - this may cause errors in other clients!
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_jobs(self) -> Callable[ + [storage_batch_operations.ListJobsRequest], + Union[ + storage_batch_operations.ListJobsResponse, + Awaitable[storage_batch_operations.ListJobsResponse] + ]]: + raise NotImplementedError() + + @property + def get_job(self) -> Callable[ + [storage_batch_operations.GetJobRequest], + Union[ + storage_batch_operations_types.Job, + Awaitable[storage_batch_operations_types.Job] + ]]: + raise NotImplementedError() + + @property + def create_job(self) -> Callable[ + [storage_batch_operations.CreateJobRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_job(self) -> Callable[ + [storage_batch_operations.DeleteJobRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def cancel_job(self) -> Callable[ + [storage_batch_operations.CancelJobRequest], + Union[ + storage_batch_operations.CancelJobResponse, + Awaitable[storage_batch_operations.CancelJobResponse] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def get_location(self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations(self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'StorageBatchOperationsTransport', +) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/grpc.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/grpc.py new file mode 100644 index 000000000000..f0a5c055f028 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/grpc.py @@ -0,0 +1,584 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import json
+import logging as std_logging
+import pickle
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import grpc_helpers
+from google.api_core import operations_v1
+from google.api_core import gapic_v1
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
+
+import grpc  # type: ignore
+import proto  # type: ignore
+
+from google.cloud.location import locations_pb2 # type: ignore
+from google.cloud.storagebatchoperations_v1.types import storage_batch_operations
+from google.cloud.storagebatchoperations_v1.types import storage_batch_operations_types
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from .base import StorageBatchOperationsTransport, DEFAULT_CLIENT_INFO
+
+try:
+    from google.api_core import client_logging  # type: ignore
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor):  # pragma: NO COVER
+    def intercept_unary_unary(self, continuation, client_call_details, request):
+        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
+        if logging_enabled:  # pragma: NO COVER
+            request_metadata = client_call_details.metadata
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra = {
+                    "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = response.trailing_metadata()
+            # Convert the gRPC response metadata into a dict of strings.
+            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
+            result = response.result()
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+
f"Received response for {client_call_details.method}.", + extra = { + "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class StorageBatchOperationsGrpcTransport(StorageBatchOperationsTransport): + """gRPC backend transport for StorageBatchOperations. + + Storage Batch Operations offers a managed experience to + perform batch operations on millions of Cloud Storage objects in + a serverless fashion. With this service, you can automate and + simplify large scale API operations performed on Cloud Storage + objects. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'storagebatchoperations.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'storagebatchoperations.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. 
It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + + # Wrap messages. 
This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'storagebatchoperations.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_jobs(self) -> Callable[ + [storage_batch_operations.ListJobsRequest], + storage_batch_operations.ListJobsResponse]: + r"""Return a callable for the list jobs method over gRPC. + + Lists Jobs in a given project and location. + + Returns: + Callable[[~.ListJobsRequest], + ~.ListJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_jobs' not in self._stubs: + self._stubs['list_jobs'] = self._logged_channel.unary_unary( + '/google.cloud.storagebatchoperations.v1.StorageBatchOperations/ListJobs', + request_serializer=storage_batch_operations.ListJobsRequest.serialize, + response_deserializer=storage_batch_operations.ListJobsResponse.deserialize, + ) + return self._stubs['list_jobs'] + + @property + def get_job(self) -> Callable[ + [storage_batch_operations.GetJobRequest], + storage_batch_operations_types.Job]: + r"""Return a callable for the get job method over gRPC. + + Gets a batch job. + + Returns: + Callable[[~.GetJobRequest], + ~.Job]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_job' not in self._stubs: + self._stubs['get_job'] = self._logged_channel.unary_unary( + '/google.cloud.storagebatchoperations.v1.StorageBatchOperations/GetJob', + request_serializer=storage_batch_operations.GetJobRequest.serialize, + response_deserializer=storage_batch_operations_types.Job.deserialize, + ) + return self._stubs['get_job'] + + @property + def create_job(self) -> Callable[ + [storage_batch_operations.CreateJobRequest], + operations_pb2.Operation]: + r"""Return a callable for the create job method over gRPC. + + Creates a batch job. + + Returns: + Callable[[~.CreateJobRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_job' not in self._stubs: + self._stubs['create_job'] = self._logged_channel.unary_unary( + '/google.cloud.storagebatchoperations.v1.StorageBatchOperations/CreateJob', + request_serializer=storage_batch_operations.CreateJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_job'] + + @property + def delete_job(self) -> Callable[ + [storage_batch_operations.DeleteJobRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete job method over gRPC. + + Deletes a batch job. + + Returns: + Callable[[~.DeleteJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_job' not in self._stubs: + self._stubs['delete_job'] = self._logged_channel.unary_unary( + '/google.cloud.storagebatchoperations.v1.StorageBatchOperations/DeleteJob', + request_serializer=storage_batch_operations.DeleteJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_job'] + + @property + def cancel_job(self) -> Callable[ + [storage_batch_operations.CancelJobRequest], + storage_batch_operations.CancelJobResponse]: + r"""Return a callable for the cancel job method over gRPC. + + Cancels a batch job. + + Returns: + Callable[[~.CancelJobRequest], + ~.CancelJobResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'cancel_job' not in self._stubs: + self._stubs['cancel_job'] = self._logged_channel.unary_unary( + '/google.cloud.storagebatchoperations.v1.StorageBatchOperations/CancelJob', + request_serializer=storage_batch_operations.CancelJobRequest.serialize, + response_deserializer=storage_batch_operations.CancelJobResponse.deserialize, + ) + return self._stubs['cancel_job'] + + def close(self): + self._logged_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]:
+        r"""Return a callable for the list locations method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+    @property
+    def kind(self) -> str:
+        return "grpc"
+
+
+__all__ = (
+    'StorageBatchOperationsGrpcTransport',
+)
diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..78842b6f6356
--- /dev/null
+++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/grpc_asyncio.py
@@ -0,0 +1,682 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
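Before the async transport below, a note on the logging interceptors these gRPC transports install: they emit request and response payloads at DEBUG level, but only when the installed google-api-core provides `client_logging` support. A sketch of surfacing those logs; the logger name is inferred from the module path in this diff and is an assumption:

    import logging

    logging.basicConfig(level=logging.DEBUG)
    logging.getLogger(
        "google.cloud.storagebatchoperations_v1.services."
        "storage_batch_operations.transports.grpc"
    ).setLevel(logging.DEBUG)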
+#
+import inspect
+import json
+import pickle
+import logging as std_logging
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry_async as retries
+from google.api_core import operations_v1
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
+
+import grpc  # type: ignore
+import proto  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.location import locations_pb2 # type: ignore
+from google.cloud.storagebatchoperations_v1.types import storage_batch_operations
+from google.cloud.storagebatchoperations_v1.types import storage_batch_operations_types
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from .base import StorageBatchOperationsTransport, DEFAULT_CLIENT_INFO
+from .grpc import StorageBatchOperationsGrpcTransport
+
+try:
+    from google.api_core import client_logging  # type: ignore
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor):  # pragma: NO COVER
+    async def intercept_unary_unary(self, continuation, client_call_details, request):
+        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
+        if logging_enabled:  # pragma: NO COVER
+            request_metadata = client_call_details.metadata
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra = {
+                    "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = await continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = await response.trailing_metadata()
+            # Convert the gRPC response metadata into a dict of strings.
+            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
+            result = await response
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response to rpc {client_call_details.method}.",
+                extra = {
+                    "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations",
+                    "rpcName":
str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class StorageBatchOperationsGrpcAsyncIOTransport(StorageBatchOperationsTransport): + """gRPC AsyncIO backend transport for StorageBatchOperations. + + Storage Batch Operations offers a managed experience to + perform batch operations on millions of Cloud Storage objects in + a serverless fashion. With this service, you can automate and + simplify large scale API operations performed on Cloud Storage + objects. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'storagebatchoperations.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'storagebatchoperations.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'storagebatchoperations.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_jobs(self) -> Callable[ + [storage_batch_operations.ListJobsRequest], + Awaitable[storage_batch_operations.ListJobsResponse]]: + r"""Return a callable for the list jobs method over gRPC. + + Lists Jobs in a given project and location. + + Returns: + Callable[[~.ListJobsRequest], + Awaitable[~.ListJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_jobs' not in self._stubs: + self._stubs['list_jobs'] = self._logged_channel.unary_unary( + '/google.cloud.storagebatchoperations.v1.StorageBatchOperations/ListJobs', + request_serializer=storage_batch_operations.ListJobsRequest.serialize, + response_deserializer=storage_batch_operations.ListJobsResponse.deserialize, + ) + return self._stubs['list_jobs'] + + @property + def get_job(self) -> Callable[ + [storage_batch_operations.GetJobRequest], + Awaitable[storage_batch_operations_types.Job]]: + r"""Return a callable for the get job method over gRPC. + + Gets a batch job. + + Returns: + Callable[[~.GetJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_job' not in self._stubs: + self._stubs['get_job'] = self._logged_channel.unary_unary( + '/google.cloud.storagebatchoperations.v1.StorageBatchOperations/GetJob', + request_serializer=storage_batch_operations.GetJobRequest.serialize, + response_deserializer=storage_batch_operations_types.Job.deserialize, + ) + return self._stubs['get_job'] + + @property + def create_job(self) -> Callable[ + [storage_batch_operations.CreateJobRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create job method over gRPC. + + Creates a batch job. + + Returns: + Callable[[~.CreateJobRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_job' not in self._stubs: + self._stubs['create_job'] = self._logged_channel.unary_unary( + '/google.cloud.storagebatchoperations.v1.StorageBatchOperations/CreateJob', + request_serializer=storage_batch_operations.CreateJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_job'] + + @property + def delete_job(self) -> Callable[ + [storage_batch_operations.DeleteJobRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete job method over gRPC. + + Deletes a batch job. + + Returns: + Callable[[~.DeleteJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_job' not in self._stubs: + self._stubs['delete_job'] = self._logged_channel.unary_unary( + '/google.cloud.storagebatchoperations.v1.StorageBatchOperations/DeleteJob', + request_serializer=storage_batch_operations.DeleteJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_job'] + + @property + def cancel_job(self) -> Callable[ + [storage_batch_operations.CancelJobRequest], + Awaitable[storage_batch_operations.CancelJobResponse]]: + r"""Return a callable for the cancel job method over gRPC. + + Cancels a batch job. + + Returns: + Callable[[~.CancelJobRequest], + Awaitable[~.CancelJobResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'cancel_job' not in self._stubs: + self._stubs['cancel_job'] = self._logged_channel.unary_unary( + '/google.cloud.storagebatchoperations.v1.StorageBatchOperations/CancelJob', + request_serializer=storage_batch_operations.CancelJobRequest.serialize, + response_deserializer=storage_batch_operations.CancelJobResponse.deserialize, + ) + return self._stubs['cancel_job'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_jobs: self._wrap_method( + self.list_jobs, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_job: self._wrap_method( + self.get_job, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_job: self._wrap_method( + self.create_job, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_job: self._wrap_method( + self.delete_job, + default_timeout=60.0, + client_info=client_info, + ), + self.cancel_job: self._wrap_method( + self.cancel_job, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        if "delete_operation" not in self._stubs:
+            self._stubs["delete_operation"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/DeleteOperation",
+                request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["delete_operation"]
+
+    @property
+    def cancel_operation(
+        self,
+    ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
+        r"""Return a callable for the cancel_operation method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "cancel_operation" not in self._stubs:
+            self._stubs["cancel_operation"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/CancelOperation",
+                request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["cancel_operation"]
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+    @property
+    def list_operations(
+        self,
+    ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]:
+        r"""Return a callable for the list_operations method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]:
+        r"""Return a callable for the list locations method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ( + 'StorageBatchOperationsGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/rest.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/rest.py new file mode 100644 index 000000000000..65fda1fe21dd --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/rest.py @@ -0,0 +1,1819 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import logging +import json # type: ignore + +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.api_core import operations_v1 +from google.cloud.location import locations_pb2 # type: ignore + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.cloud.storagebatchoperations_v1.types import storage_batch_operations +from google.cloud.storagebatchoperations_v1.types import storage_batch_operations_types +from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + + +from .rest_base import _BaseStorageBatchOperationsRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + + +class StorageBatchOperationsRestInterceptor: + """Interceptor for StorageBatchOperations. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the StorageBatchOperationsRestTransport. + + .. code-block:: python + class MyCustomStorageBatchOperationsInterceptor(StorageBatchOperationsRestInterceptor): + def pre_cancel_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_cancel_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_jobs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_jobs(self, response): + logging.log(f"Received response: {response}") + return response + + transport = StorageBatchOperationsRestTransport(interceptor=MyCustomStorageBatchOperationsInterceptor()) + client = StorageBatchOperationsClient(transport=transport) + + + """ + def pre_cancel_job(self, request: storage_batch_operations.CancelJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[storage_batch_operations.CancelJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for cancel_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the StorageBatchOperations server. + """ + return request, metadata + + def post_cancel_job(self, response: storage_batch_operations.CancelJobResponse) -> storage_batch_operations.CancelJobResponse: + """Post-rpc interceptor for cancel_job + + DEPRECATED. Please use the `post_cancel_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the StorageBatchOperations server but before + it is returned to user code. This `post_cancel_job` interceptor runs + before the `post_cancel_job_with_metadata` interceptor. + """ + return response + + def post_cancel_job_with_metadata(self, response: storage_batch_operations.CancelJobResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[storage_batch_operations.CancelJobResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for cancel_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the StorageBatchOperations server but before it is returned to user code. + + We recommend only using this `post_cancel_job_with_metadata` + interceptor in new development instead of the `post_cancel_job` interceptor. + When both interceptors are used, this `post_cancel_job_with_metadata` interceptor runs after the + `post_cancel_job` interceptor. 
The (possibly modified) response returned by + `post_cancel_job` will be passed to + `post_cancel_job_with_metadata`. + """ + return response, metadata + + def pre_create_job(self, request: storage_batch_operations.CreateJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[storage_batch_operations.CreateJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the StorageBatchOperations server. + """ + return request, metadata + + def post_create_job(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_job + + DEPRECATED. Please use the `post_create_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the StorageBatchOperations server but before + it is returned to user code. This `post_create_job` interceptor runs + before the `post_create_job_with_metadata` interceptor. + """ + return response + + def post_create_job_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the StorageBatchOperations server but before it is returned to user code. + + We recommend only using this `post_create_job_with_metadata` + interceptor in new development instead of the `post_create_job` interceptor. + When both interceptors are used, this `post_create_job_with_metadata` interceptor runs after the + `post_create_job` interceptor. The (possibly modified) response returned by + `post_create_job` will be passed to + `post_create_job_with_metadata`. + """ + return response, metadata + + def pre_delete_job(self, request: storage_batch_operations.DeleteJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[storage_batch_operations.DeleteJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the StorageBatchOperations server. + """ + return request, metadata + + def pre_get_job(self, request: storage_batch_operations.GetJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[storage_batch_operations.GetJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the StorageBatchOperations server. + """ + return request, metadata + + def post_get_job(self, response: storage_batch_operations_types.Job) -> storage_batch_operations_types.Job: + """Post-rpc interceptor for get_job + + DEPRECATED. Please use the `post_get_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the StorageBatchOperations server but before + it is returned to user code. This `post_get_job` interceptor runs + before the `post_get_job_with_metadata` interceptor. 
+ """ + return response + + def post_get_job_with_metadata(self, response: storage_batch_operations_types.Job, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[storage_batch_operations_types.Job, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the StorageBatchOperations server but before it is returned to user code. + + We recommend only using this `post_get_job_with_metadata` + interceptor in new development instead of the `post_get_job` interceptor. + When both interceptors are used, this `post_get_job_with_metadata` interceptor runs after the + `post_get_job` interceptor. The (possibly modified) response returned by + `post_get_job` will be passed to + `post_get_job_with_metadata`. + """ + return response, metadata + + def pre_list_jobs(self, request: storage_batch_operations.ListJobsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[storage_batch_operations.ListJobsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_jobs + + Override in a subclass to manipulate the request or metadata + before they are sent to the StorageBatchOperations server. + """ + return request, metadata + + def post_list_jobs(self, response: storage_batch_operations.ListJobsResponse) -> storage_batch_operations.ListJobsResponse: + """Post-rpc interceptor for list_jobs + + DEPRECATED. Please use the `post_list_jobs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the StorageBatchOperations server but before + it is returned to user code. This `post_list_jobs` interceptor runs + before the `post_list_jobs_with_metadata` interceptor. + """ + return response + + def post_list_jobs_with_metadata(self, response: storage_batch_operations.ListJobsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[storage_batch_operations.ListJobsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_jobs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the StorageBatchOperations server but before it is returned to user code. + + We recommend only using this `post_list_jobs_with_metadata` + interceptor in new development instead of the `post_list_jobs` interceptor. + When both interceptors are used, this `post_list_jobs_with_metadata` interceptor runs after the + `post_list_jobs` interceptor. The (possibly modified) response returned by + `post_list_jobs` will be passed to + `post_list_jobs_with_metadata`. + """ + return response, metadata + + def pre_get_location( + self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the StorageBatchOperations server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the StorageBatchOperations server but before + it is returned to user code. 
+ """ + return response + + def pre_list_locations( + self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the StorageBatchOperations server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the StorageBatchOperations server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the StorageBatchOperations server. + """ + return request, metadata + + def post_cancel_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the StorageBatchOperations server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the StorageBatchOperations server. + """ + return request, metadata + + def post_delete_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the StorageBatchOperations server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the StorageBatchOperations server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the StorageBatchOperations server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the StorageBatchOperations server. 
+        """
+        return request, metadata
+
+    def post_list_operations(
+        self, response: operations_pb2.ListOperationsResponse
+    ) -> operations_pb2.ListOperationsResponse:
+        """Post-rpc interceptor for list_operations
+
+        Override in a subclass to manipulate the response
+        after it is returned by the StorageBatchOperations server but before
+        it is returned to user code.
+        """
+        return response
+
+
+@dataclasses.dataclass
+class StorageBatchOperationsRestStub:
+    _session: AuthorizedSession
+    _host: str
+    _interceptor: StorageBatchOperationsRestInterceptor
+
+
+class StorageBatchOperationsRestTransport(_BaseStorageBatchOperationsRestTransport):
+    """REST backend synchronous transport for StorageBatchOperations.
+
+    Storage Batch Operations offers a managed experience to
+    perform batch operations on millions of Cloud Storage objects in
+    a serverless fashion. With this service, you can automate and
+    simplify large scale API operations performed on Cloud Storage
+    objects.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+    """
+
+    def __init__(self, *,
+            host: str = 'storagebatchoperations.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            client_cert_source_for_mtls: Optional[Callable[[
+                ], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            url_scheme: str = 'https',
+            interceptor: Optional[StorageBatchOperationsRestInterceptor] = None,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to (default: 'storagebatchoperations.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or StorageBatchOperationsRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.CancelOperation': [ + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + ], + 'google.longrunning.Operations.DeleteOperation': [ + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.ListOperations': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1") + + self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + + # Return the client from cache. + return self._operations_client + + class _CancelJob(_BaseStorageBatchOperationsRestTransport._BaseCancelJob, StorageBatchOperationsRestStub): + def __hash__(self): + return hash("StorageBatchOperationsRestTransport.CancelJob") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: storage_batch_operations.CancelJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> storage_batch_operations.CancelJobResponse: + r"""Call the cancel job method over HTTP. + + Args: + request (~.storage_batch_operations.CancelJobRequest): + The request object. Message for Job to Cancel + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.storage_batch_operations.CancelJobResponse: + Message for response to cancel Job. + """ + + http_options = _BaseStorageBatchOperationsRestTransport._BaseCancelJob._get_http_options() + + request, metadata = self._interceptor.pre_cancel_job(request, metadata) + transcoded_request = _BaseStorageBatchOperationsRestTransport._BaseCancelJob._get_transcoded_request(http_options, request) + + body = _BaseStorageBatchOperationsRestTransport._BaseCancelJob._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseStorageBatchOperationsRestTransport._BaseCancelJob._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.CancelJob", + extra = { + "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "rpcName": "CancelJob", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = StorageBatchOperationsRestTransport._CancelJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
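+            # core_exceptions.from_http_response() maps the status code and error
+            # payload to the matching GoogleAPICallError subclass (for example,
+            # 404 -> NotFound, 403 -> PermissionDenied), so callers get typed errors.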
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = storage_batch_operations.CancelJobResponse()
+            pb_resp = storage_batch_operations.CancelJobResponse.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_cancel_job(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_cancel_job_with_metadata(resp, response_metadata)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    response_payload = storage_batch_operations.CancelJobResponse.to_json(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.cancel_job",
+                    extra = {
+                        "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations",
+                        "rpcName": "CancelJob",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
+    class _CreateJob(_BaseStorageBatchOperationsRestTransport._BaseCreateJob, StorageBatchOperationsRestStub):
+        def __hash__(self):
+            return hash("StorageBatchOperationsRestTransport.CreateJob")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+                request: storage_batch_operations.CreateJobRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
+                ) -> operations_pb2.Operation:
+            r"""Call the create job method over HTTP.
+
+            Args:
+                request (~.storage_batch_operations.CreateJobRequest):
+                    The request object. Message for creating a Job
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
+
+            Returns:
+                ~.operations_pb2.Operation:
+                    This resource represents a
+                long-running operation that is the
+                result of a network API call.
+
+            """
+
+            http_options = _BaseStorageBatchOperationsRestTransport._BaseCreateJob._get_http_options()
+
+            request, metadata = self._interceptor.pre_create_job(request, metadata)
+            transcoded_request = _BaseStorageBatchOperationsRestTransport._BaseCreateJob._get_transcoded_request(http_options, request)
+
+            body = _BaseStorageBatchOperationsRestTransport._BaseCreateJob._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BaseStorageBatchOperationsRestTransport._BaseCreateJob._get_query_params_json(transcoded_request)
+
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
+                method = transcoded_request['method']
+                try:
+                    request_payload = json_format.MessageToJson(request)
+                except:
+                    request_payload = None
+                http_request = {
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
+                }
+                _LOGGER.debug(
+                    f"Sending request for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.CreateJob",
+                    extra = {
+                        "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations",
+                        "rpcName": "CreateJob",
+                        "httpRequest": http_request,
+                        "metadata": http_request["headers"],
+                    },
+                )
+
+            # Send the request
+            response = StorageBatchOperationsRestTransport._CreateJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = operations_pb2.Operation()
+            json_format.Parse(response.content, resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_create_job(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_create_job_with_metadata(resp, response_metadata)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    response_payload = json_format.MessageToJson(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.create_job",
+                    extra = {
+                        "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations",
+                        "rpcName": "CreateJob",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
+    class _DeleteJob(_BaseStorageBatchOperationsRestTransport._BaseDeleteJob, StorageBatchOperationsRestStub):
+        def __hash__(self):
+            return hash("StorageBatchOperationsRestTransport.DeleteJob")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                )
+            return response
+
+        def __call__(self,
+                request: storage_batch_operations.DeleteJobRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
+                ):
+            r"""Call the delete job method over HTTP.
+
+            Args:
+                request (~.storage_batch_operations.DeleteJobRequest):
+                    The request object. Message for deleting a Job
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
+            """
+
+            http_options = _BaseStorageBatchOperationsRestTransport._BaseDeleteJob._get_http_options()
+
+            request, metadata = self._interceptor.pre_delete_job(request, metadata)
+            transcoded_request = _BaseStorageBatchOperationsRestTransport._BaseDeleteJob._get_transcoded_request(http_options, request)
+
+            # Jsonify the query params
+            query_params = _BaseStorageBatchOperationsRestTransport._BaseDeleteJob._get_query_params_json(transcoded_request)
+
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
+                method = transcoded_request['method']
+                try:
+                    request_payload = json_format.MessageToJson(request)
+                except:
+                    request_payload = None
+                http_request = {
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
+                }
+                _LOGGER.debug(
+                    f"Sending request for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.DeleteJob",
+                    extra = {
+                        "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations",
+                        "rpcName": "DeleteJob",
+                        "httpRequest": http_request,
+                        "metadata": http_request["headers"],
+                    },
+                )
+
+            # Send the request
+            response = StorageBatchOperationsRestTransport._DeleteJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+    class _GetJob(_BaseStorageBatchOperationsRestTransport._BaseGetJob, StorageBatchOperationsRestStub):
+        def __hash__(self):
+            return hash("StorageBatchOperationsRestTransport.GetJob")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                )
+            return response
+
+        def __call__(self,
+                request: storage_batch_operations.GetJobRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
+                ) -> storage_batch_operations_types.Job:
+            r"""Call the get job method over HTTP.
+
+            Args:
+                request (~.storage_batch_operations.GetJobRequest):
+                    The request object. Message for getting a Job
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.storage_batch_operations_types.Job: + The Storage Batch Operations Job + description. + + """ + + http_options = _BaseStorageBatchOperationsRestTransport._BaseGetJob._get_http_options() + + request, metadata = self._interceptor.pre_get_job(request, metadata) + transcoded_request = _BaseStorageBatchOperationsRestTransport._BaseGetJob._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseStorageBatchOperationsRestTransport._BaseGetJob._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.GetJob", + extra = { + "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "rpcName": "GetJob", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = StorageBatchOperationsRestTransport._GetJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
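+            # On success, the JSON body below is parsed into the proto with
+            # ignore_unknown_fields=True, so fields added to the API later do
+            # not break older versions of this client.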
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = storage_batch_operations_types.Job()
+            pb_resp = storage_batch_operations_types.Job.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_get_job(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_get_job_with_metadata(resp, response_metadata)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    response_payload = storage_batch_operations_types.Job.to_json(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.get_job",
+                    extra = {
+                        "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations",
+                        "rpcName": "GetJob",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
+    class _ListJobs(_BaseStorageBatchOperationsRestTransport._BaseListJobs, StorageBatchOperationsRestStub):
+        def __hash__(self):
+            return hash("StorageBatchOperationsRestTransport.ListJobs")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                )
+            return response
+
+        def __call__(self,
+                request: storage_batch_operations.ListJobsRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
+                ) -> storage_batch_operations.ListJobsResponse:
+            r"""Call the list jobs method over HTTP.
+
+            Args:
+                request (~.storage_batch_operations.ListJobsRequest):
+                    The request object. Message for request to list Jobs
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
+
+            Returns:
+                ~.storage_batch_operations.ListJobsResponse:
+                    Message for response to listing Jobs
+            """
+
+            http_options = _BaseStorageBatchOperationsRestTransport._BaseListJobs._get_http_options()
+
+            request, metadata = self._interceptor.pre_list_jobs(request, metadata)
+            transcoded_request = _BaseStorageBatchOperationsRestTransport._BaseListJobs._get_transcoded_request(http_options, request)
+
+            # Jsonify the query params
+            query_params = _BaseStorageBatchOperationsRestTransport._BaseListJobs._get_query_params_json(transcoded_request)
+
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
+                method = transcoded_request['method']
+                try:
+                    request_payload = type(request).to_json(request)
+                except:
+                    request_payload = None
+                http_request = {
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
+                }
+                _LOGGER.debug(
+                    f"Sending request for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.ListJobs",
+                    extra = {
+                        "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations",
+                        "rpcName": "ListJobs",
+                        "httpRequest": http_request,
+                        "metadata": http_request["headers"],
+                    },
+                )
+
+            # Send the request
+            response = StorageBatchOperationsRestTransport._ListJobs._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = storage_batch_operations.ListJobsResponse()
+            pb_resp = storage_batch_operations.ListJobsResponse.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_list_jobs(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_list_jobs_with_metadata(resp, response_metadata)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    response_payload = storage_batch_operations.ListJobsResponse.to_json(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.list_jobs",
+                    extra = {
+                        "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations",
+                        "rpcName": "ListJobs",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
+    @property
+    def cancel_job(self) -> Callable[
+            [storage_batch_operations.CancelJobRequest],
+            storage_batch_operations.CancelJobResponse]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._CancelJob(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def create_job(self) -> Callable[
+            [storage_batch_operations.CreateJobRequest],
+            operations_pb2.Operation]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+ # In C++ this would require a dynamic_cast + return self._CreateJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_job(self) -> Callable[ + [storage_batch_operations.DeleteJobRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_job(self) -> Callable[ + [storage_batch_operations.GetJobRequest], + storage_batch_operations_types.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_jobs(self) -> Callable[ + [storage_batch_operations.ListJobsRequest], + storage_batch_operations.ListJobsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListJobs(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(_BaseStorageBatchOperationsRestTransport._BaseGetLocation, StorageBatchOperationsRestStub): + def __hash__(self): + return hash("StorageBatchOperationsRestTransport.GetLocation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: locations_pb2.GetLocationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options = _BaseStorageBatchOperationsRestTransport._BaseGetLocation._get_http_options() + + request, metadata = self._interceptor.pre_get_location(request, metadata) + transcoded_request = _BaseStorageBatchOperationsRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseStorageBatchOperationsRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.GetLocation", + extra = { + "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = StorageBatchOperationsRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.Location() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient.GetLocation", + extra = { + "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(_BaseStorageBatchOperationsRestTransport._BaseListLocations, StorageBatchOperationsRestStub): + def __hash__(self): + return hash("StorageBatchOperationsRestTransport.ListLocations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: locations_pb2.ListLocationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list 
locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options = _BaseStorageBatchOperationsRestTransport._BaseListLocations._get_http_options() + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + transcoded_request = _BaseStorageBatchOperationsRestTransport._BaseListLocations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseStorageBatchOperationsRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.ListLocations", + extra = { + "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = StorageBatchOperationsRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
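+            # Unlike the Job RPCs above, the Locations mixin returns a plain
+            # protobuf message, so the body is decoded and parsed with
+            # json_format directly instead of through a proto-plus wrapper.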
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient.ListLocations", + extra = { + "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "rpcName": "ListLocations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(_BaseStorageBatchOperationsRestTransport._BaseCancelOperation, StorageBatchOperationsRestStub): + def __hash__(self): + return hash("StorageBatchOperationsRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: operations_pb2.CancelOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseStorageBatchOperationsRestTransport._BaseCancelOperation._get_http_options() + + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) + transcoded_request = _BaseStorageBatchOperationsRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + + body = _BaseStorageBatchOperationsRestTransport._BaseCancelOperation._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseStorageBatchOperationsRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.CancelOperation", + extra = { + "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = StorageBatchOperationsRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(_BaseStorageBatchOperationsRestTransport._BaseDeleteOperation, StorageBatchOperationsRestStub): + def __hash__(self): + return hash("StorageBatchOperationsRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.DeleteOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseStorageBatchOperationsRestTransport._BaseDeleteOperation._get_http_options() + + request, metadata = self._interceptor.pre_delete_operation(request, metadata) + transcoded_request = _BaseStorageBatchOperationsRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseStorageBatchOperationsRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.DeleteOperation", + extra = { + "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = StorageBatchOperationsRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(_BaseStorageBatchOperationsRestTransport._BaseGetOperation, StorageBatchOperationsRestStub): + def __hash__(self): + return hash("StorageBatchOperationsRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options = _BaseStorageBatchOperationsRestTransport._BaseGetOperation._get_http_options() + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseStorageBatchOperationsRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseStorageBatchOperationsRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.GetOperation", + extra = { + "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = StorageBatchOperationsRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient.GetOperation", + extra = { + "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(_BaseStorageBatchOperationsRestTransport._BaseListOperations, StorageBatchOperationsRestStub): + def __hash__(self): + return hash("StorageBatchOperationsRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.ListOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.ListOperationsResponse: + + 
r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = _BaseStorageBatchOperationsRestTransport._BaseListOperations._get_http_options() + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseStorageBatchOperationsRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseStorageBatchOperationsRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.ListOperations", + extra = { + "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = StorageBatchOperationsRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient.ListOperations", + extra = { + "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'StorageBatchOperationsRestTransport', +) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/rest_base.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/rest_base.py new file mode 100644 index 000000000000..733154701bbd --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/rest_base.py @@ -0,0 +1,455 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.cloud.location import locations_pb2 # type: ignore +from .base import StorageBatchOperationsTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.storagebatchoperations_v1.types import storage_batch_operations +from google.cloud.storagebatchoperations_v1.types import storage_batch_operations_types +from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + + +class _BaseStorageBatchOperationsRestTransport(StorageBatchOperationsTransport): + """Base REST backend transport for StorageBatchOperations. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+    """
+
+    def __init__(self, *,
+            host: str = 'storagebatchoperations.googleapis.com',
+            credentials: Optional[Any] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            url_scheme: str = 'https',
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'storagebatchoperations.googleapis.com').
+            credentials (Optional[Any]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+
+    class _BaseCancelJob:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/v1/{name=projects/*/locations/*/jobs/*}:cancel',
+                'body': '*',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = storage_batch_operations.CancelJobRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                use_integers_for_enums=True
+            )
+            return body
+
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseStorageBatchOperationsRestTransport._BaseCancelJob._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseCreateJob:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+            "jobId": "",
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/jobs', + 'body': 'job', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = storage_batch_operations.CreateJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseStorageBatchOperationsRestTransport._BaseCreateJob._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteJob: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/jobs/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = storage_batch_operations.DeleteJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseStorageBatchOperationsRestTransport._BaseDeleteJob._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetJob: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/jobs/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = storage_batch_operations.GetJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseStorageBatchOperationsRestTransport._BaseGetJob._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListJobs: + def __hash__(self): # pragma: NO COVER + return 
NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/jobs', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = storage_batch_operations.ListJobsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseStorageBatchOperationsRestTransport._BaseListJobs._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetLocation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListLocations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*}/locations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request['body']) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") 
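+
+        # Note: the operations/locations mixin stubs in this module transcode
+        # plain protobuf messages via json_format.MessageToDict, unlike the
+        # job RPCs above, which go through the proto-plus .pb() wrappers.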
+ + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + +__all__=( + '_BaseStorageBatchOperationsRestTransport', +) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/types/__init__.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/types/__init__.py new file mode 100644 index 000000000000..ebd4cde2da8d --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/types/__init__.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .storage_batch_operations import ( + CancelJobRequest, + CancelJobResponse, + CreateJobRequest, + DeleteJobRequest, + GetJobRequest, + ListJobsRequest, + ListJobsResponse, + OperationMetadata, +) +from .storage_batch_operations_types import ( + BucketList, + Counters, + DeleteObject, + ErrorLogEntry, + ErrorSummary, + Job, + LoggingConfig, + Manifest, + PrefixList, + PutMetadata, + PutObjectHold, + RewriteObject, +) + +__all__ = ( + 'CancelJobRequest', + 'CancelJobResponse', + 'CreateJobRequest', + 'DeleteJobRequest', + 'GetJobRequest', + 'ListJobsRequest', + 'ListJobsResponse', + 'OperationMetadata', + 'BucketList', + 'Counters', + 'DeleteObject', + 'ErrorLogEntry', + 'ErrorSummary', + 'Job', + 'LoggingConfig', + 'Manifest', + 'PrefixList', + 'PutMetadata', + 'PutObjectHold', + 'RewriteObject', +) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/types/storage_batch_operations.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/types/storage_batch_operations.py new file mode 100644 index 000000000000..2f2ad62038a6 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/types/storage_batch_operations.py @@ -0,0 +1,288 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.storagebatchoperations_v1.types import storage_batch_operations_types +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.storagebatchoperations.v1', + manifest={ + 'ListJobsRequest', + 'ListJobsResponse', + 'GetJobRequest', + 'CreateJobRequest', + 'CancelJobRequest', + 'DeleteJobRequest', + 'CancelJobResponse', + 'OperationMetadata', + }, +) + + +class ListJobsRequest(proto.Message): + r"""Message for request to list Jobs + + Attributes: + parent (str): + Required. Format: + projects/{project_id}/locations/{location_id} . + filter (str): + Optional. Filters results as defined by + https://google.aip.dev/160. + page_size (int): + Optional. The list page size. default page + size is 100. + page_token (str): + Optional. The list page token. + order_by (str): + Optional. Field to sort by. Supported fields are name, + create_time. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListJobsResponse(proto.Message): + r"""Message for response to listing Jobs + + Attributes: + jobs (MutableSequence[google.cloud.storagebatchoperations_v1.types.Job]): + A list of storage batch jobs. 
+        next_page_token (str):
+            A token identifying a page of results.
+        unreachable (MutableSequence[str]):
+            Locations that could not be reached.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    jobs: MutableSequence[storage_batch_operations_types.Job] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=storage_batch_operations_types.Job,
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    unreachable: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+
+
+class GetJobRequest(proto.Message):
+    r"""Message for getting a Job
+
+    Attributes:
+        name (str):
+            Required. ``name`` of the job to retrieve. Format:
+            projects/{project_id}/locations/{location_id}/jobs/{job_id}.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class CreateJobRequest(proto.Message):
+    r"""Message for creating a Job
+
+    Attributes:
+        parent (str):
+            Required. Value for parent.
+        job_id (str):
+            Required. The ``job_id`` for this Job. If not specified,
+            an id is generated. ``job_id`` should be no more than 128
+            characters and must include only characters available in
+            DNS names, as defined by RFC-1123.
+        job (google.cloud.storagebatchoperations_v1.types.Job):
+            Required. The resource being created.
+        request_id (str):
+            Optional. An optional request ID to identify requests.
+            Specify a unique request ID in case you need to retry your
+            request. Requests with the same ``request_id`` will be
+            ignored for at least 60 minutes after the first request.
+            The request ID must be a valid UUID with the exception
+            that zero UUID is not supported
+            (00000000-0000-0000-0000-000000000000).
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    job_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    job: storage_batch_operations_types.Job = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=storage_batch_operations_types.Job,
+    )
+    request_id: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+
+
+class CancelJobRequest(proto.Message):
+    r"""Message for Job to Cancel
+
+    Attributes:
+        name (str):
+            Required. The ``name`` of the job to cancel. Format:
+            projects/{project_id}/locations/{location_id}/jobs/{job_id}.
+        request_id (str):
+            Optional. An optional request ID to identify requests.
+            Specify a unique request ID in case you need to retry your
+            request. Requests with the same ``request_id`` will be
+            ignored for at least 60 minutes after the first request.
+            The request ID must be a valid UUID with the exception
+            that zero UUID is not supported
+            (00000000-0000-0000-0000-000000000000).
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    request_id: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class DeleteJobRequest(proto.Message):
+    r"""Message for deleting a Job
+
+    Attributes:
+        name (str):
+            Required. The ``name`` of the job to delete. Format:
+            projects/{project_id}/locations/{location_id}/jobs/{job_id}.
+        request_id (str):
+            Optional. An optional request ID to identify requests.
+            Specify a unique request ID in case you need to retry your
+            request. Requests with the same ``request_id`` will be
+            ignored for at least 60 minutes after the first request.
+            The request ID must be a valid UUID with the exception
+            that zero UUID is not supported
+            (00000000-0000-0000-0000-000000000000).
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CancelJobResponse(proto.Message): + r"""Message for response to cancel Job. + """ + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of the long-running operation. + + Attributes: + operation (str): + Output only. The unique operation resource + name. Format: + projects/{project}/locations/{location}/operations/{operation}. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have been + cancelled successfully have + [google.longrunning.Operation.error][google.longrunning.Operation.error] + value with a + [google.rpc.Status.code][google.rpc.Status.code] of 1, + corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + job (google.cloud.storagebatchoperations_v1.types.Job): + Output only. The Job associated with the + operation. + """ + + operation: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=7, + ) + api_version: str = proto.Field( + proto.STRING, + number=8, + ) + job: storage_batch_operations_types.Job = proto.Field( + proto.MESSAGE, + number=10, + message=storage_batch_operations_types.Job, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/types/storage_batch_operations_types.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/types/storage_batch_operations_types.py new file mode 100644 index 000000000000..30b47c85a51f --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/types/storage_batch_operations_types.py @@ -0,0 +1,653 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import code_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.storagebatchoperations.v1', + manifest={ + 'Job', + 'BucketList', + 'Manifest', + 'PrefixList', + 'PutObjectHold', + 'DeleteObject', + 'RewriteObject', + 'PutMetadata', + 'ErrorSummary', + 'ErrorLogEntry', + 'Counters', + 'LoggingConfig', + }, +) + + +class Job(proto.Message): + r"""The Storage Batch Operations Job description. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Identifier. The resource name of the Job. job_id is unique + within the project, that is either set by the customer or + defined by the service. Format: + projects/{project}/locations/global/jobs/{job_id} . For + example: "projects/123456/locations/global/jobs/job01". + description (str): + Optional. A description provided by the user + for the job. Its max length is 1024 bytes when + Unicode-encoded. + bucket_list (google.cloud.storagebatchoperations_v1.types.BucketList): + Specifies a list of buckets and their objects + to be transformed. + + This field is a member of `oneof`_ ``source``. + put_object_hold (google.cloud.storagebatchoperations_v1.types.PutObjectHold): + Changes object hold status. + + This field is a member of `oneof`_ ``transformation``. + delete_object (google.cloud.storagebatchoperations_v1.types.DeleteObject): + Delete objects. + + This field is a member of `oneof`_ ``transformation``. + put_metadata (google.cloud.storagebatchoperations_v1.types.PutMetadata): + Updates object metadata. Allows updating + fixed-key and custom metadata and fixed-key + metadata i.e. Cache-Control, + Content-Disposition, Content-Encoding, + Content-Language, Content-Type, Custom-Time. + + This field is a member of `oneof`_ ``transformation``. + rewrite_object (google.cloud.storagebatchoperations_v1.types.RewriteObject): + Rewrite the object and updates metadata like + KMS key. + + This field is a member of `oneof`_ ``transformation``. + logging_config (google.cloud.storagebatchoperations_v1.types.LoggingConfig): + Optional. Logging configuration. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time that the job was + created. + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time that the job was + scheduled. + complete_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time that the job was + completed. + counters (google.cloud.storagebatchoperations_v1.types.Counters): + Output only. Information about the progress + of the job. + error_summaries (MutableSequence[google.cloud.storagebatchoperations_v1.types.ErrorSummary]): + Output only. Summarizes errors encountered + with sample error log entries. + state (google.cloud.storagebatchoperations_v1.types.Job.State): + Output only. State of the job. + """ + class State(proto.Enum): + r"""Describes state of a job. + + Values: + STATE_UNSPECIFIED (0): + Default value. This value is unused. + RUNNING (1): + In progress. + SUCCEEDED (2): + Completed successfully. + CANCELED (3): + Cancelled by the user. 
+ FAILED (4): + Terminated due to an unrecoverable failure. + """ + STATE_UNSPECIFIED = 0 + RUNNING = 1 + SUCCEEDED = 2 + CANCELED = 3 + FAILED = 4 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + bucket_list: 'BucketList' = proto.Field( + proto.MESSAGE, + number=19, + oneof='source', + message='BucketList', + ) + put_object_hold: 'PutObjectHold' = proto.Field( + proto.MESSAGE, + number=5, + oneof='transformation', + message='PutObjectHold', + ) + delete_object: 'DeleteObject' = proto.Field( + proto.MESSAGE, + number=6, + oneof='transformation', + message='DeleteObject', + ) + put_metadata: 'PutMetadata' = proto.Field( + proto.MESSAGE, + number=8, + oneof='transformation', + message='PutMetadata', + ) + rewrite_object: 'RewriteObject' = proto.Field( + proto.MESSAGE, + number=20, + oneof='transformation', + message='RewriteObject', + ) + logging_config: 'LoggingConfig' = proto.Field( + proto.MESSAGE, + number=9, + message='LoggingConfig', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + schedule_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=11, + message=timestamp_pb2.Timestamp, + ) + complete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=12, + message=timestamp_pb2.Timestamp, + ) + counters: 'Counters' = proto.Field( + proto.MESSAGE, + number=13, + message='Counters', + ) + error_summaries: MutableSequence['ErrorSummary'] = proto.RepeatedField( + proto.MESSAGE, + number=14, + message='ErrorSummary', + ) + state: State = proto.Field( + proto.ENUM, + number=15, + enum=State, + ) + + +class BucketList(proto.Message): + r"""Describes list of buckets and their objects to be + transformed. + + Attributes: + buckets (MutableSequence[google.cloud.storagebatchoperations_v1.types.BucketList.Bucket]): + Required. List of buckets and their objects + to be transformed. Currently, only one bucket + configuration is supported. If multiple buckets + are specified, an error will be returned. + """ + + class Bucket(proto.Message): + r"""Describes configuration of a single bucket and its objects to + be transformed. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + bucket (str): + Required. Bucket name for the objects to be + transformed. + prefix_list (google.cloud.storagebatchoperations_v1.types.PrefixList): + Specifies objects matching a prefix set. + + This field is a member of `oneof`_ ``object_configuration``. + manifest (google.cloud.storagebatchoperations_v1.types.Manifest): + Specifies objects in a manifest file. + + This field is a member of `oneof`_ ``object_configuration``. + """ + + bucket: str = proto.Field( + proto.STRING, + number=1, + ) + prefix_list: 'PrefixList' = proto.Field( + proto.MESSAGE, + number=2, + oneof='object_configuration', + message='PrefixList', + ) + manifest: 'Manifest' = proto.Field( + proto.MESSAGE, + number=3, + oneof='object_configuration', + message='Manifest', + ) + + buckets: MutableSequence[Bucket] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=Bucket, + ) + + +class Manifest(proto.Message): + r"""Describes list of objects to be transformed. 
+
+    Attributes:
+        manifest_location (str):
+            Required. ``manifest_location`` must contain the manifest
+            source file that is a CSV file in a Google Cloud Storage
+            bucket. Each row in the file must include the object
+            details, i.e. BucketId and Name. Generation may optionally
+            be specified. When it is not specified, the live object is
+            acted upon. ``manifest_location`` should either be
+
+            1) An absolute path to the object in the format of
+               gs://bucket_name/path/file_name.csv.
+            2) An absolute path with a single wildcard character in
+               the file name, for example
+               gs://bucket_name/path/file_name*.csv. If manifest
+               location is specified with a wildcard, objects in all
+               manifest files matching the pattern will be acted upon.
+    """
+
+    manifest_location: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class PrefixList(proto.Message):
+    r"""Describes prefixes of objects to be transformed.
+
+    Attributes:
+        included_object_prefixes (MutableSequence[str]):
+            Optional. Include prefixes of the objects to be
+            transformed.
+
+            -  Supports full object name
+            -  Supports prefix of the object name
+            -  Wildcards are not supported
+            -  Supports empty string for all objects in a bucket.
+    """
+
+    included_object_prefixes: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=2,
+    )
+
+
+class PutObjectHold(proto.Message):
+    r"""Describes options to update object hold.
+
+    Attributes:
+        temporary_hold (google.cloud.storagebatchoperations_v1.types.PutObjectHold.HoldStatus):
+            Required. Updates object temporary holds state. When an
+            object temporary hold is set, the object cannot be deleted
+            or replaced.
+        event_based_hold (google.cloud.storagebatchoperations_v1.types.PutObjectHold.HoldStatus):
+            Required. Updates object event based holds state. When an
+            object event based hold is set, the object cannot be
+            deleted or replaced. Resets the object's time in the
+            bucket for the purposes of the retention period.
+    """
+    class HoldStatus(proto.Enum):
+        r"""Describes the status of the hold.
+
+        Values:
+            HOLD_STATUS_UNSPECIFIED (0):
+                Default value. Object hold status will not be
+                changed.
+            SET (1):
+                Places the hold.
+            UNSET (2):
+                Releases the hold.
+        """
+        HOLD_STATUS_UNSPECIFIED = 0
+        SET = 1
+        UNSET = 2
+
+    temporary_hold: HoldStatus = proto.Field(
+        proto.ENUM,
+        number=1,
+        enum=HoldStatus,
+    )
+    event_based_hold: HoldStatus = proto.Field(
+        proto.ENUM,
+        number=2,
+        enum=HoldStatus,
+    )
+
+
+class DeleteObject(proto.Message):
+    r"""Describes options to delete an object.
+
+    Attributes:
+        permanent_object_deletion_enabled (bool):
+            Required. Controls deletion behavior when versioning is
+            enabled for the object's bucket. If true, both live and
+            noncurrent objects will be permanently deleted. Otherwise,
+            live objects in versioned buckets will become noncurrent
+            and objects that were already noncurrent will be skipped.
+            This setting doesn't have any impact on the Soft Delete
+            feature. All objects deleted by this service can be
+            restored for the Soft Delete retention duration, if
+            enabled. If enabled and the manifest doesn't specify an
+            object's generation, a GetObjectMetadata call (a Class B
+            operation) will be made to determine the live object
+            generation.
+    """
+
+    permanent_object_deletion_enabled: bool = proto.Field(
+        proto.BOOL,
+        number=1,
+    )
+
+
+class RewriteObject(proto.Message):
+    r"""Describes options for object rewrite.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        kms_key (str):
+            Required. Resource name of the Cloud KMS key that will be
+            used to encrypt the object. The Cloud KMS key must be
+            located in the same location as the object. Refer to
+            https://cloud.google.com/storage/docs/encryption/using-customer-managed-keys#add-object-key
+            for additional documentation. Format:
+
+            projects/{project}/locations/{location}/keyRings/{keyring}/cryptoKeys/{key}
+
+            For example:
+            "projects/123456/locations/us-central1/keyRings/my-keyring/cryptoKeys/my-key".
+            The object will be rewritten and set with the specified
+            KMS key.
+
+            This field is a member of `oneof`_ ``_kms_key``.
+    """
+
+    kms_key: str = proto.Field(
+        proto.STRING,
+        number=1,
+        optional=True,
+    )
+
+
+class PutMetadata(proto.Message):
+    r"""Describes options for object metadata update.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        content_disposition (str):
+            Optional. Updates objects' Content-Disposition fixed
+            metadata. Unset values will be ignored. Set empty values
+            to clear the metadata. Refer to
+            https://cloud.google.com/storage/docs/metadata#content-disposition
+            for additional documentation.
+
+            This field is a member of `oneof`_ ``_content_disposition``.
+        content_encoding (str):
+            Optional. Updates objects' Content-Encoding fixed
+            metadata. Unset values will be ignored. Set empty values
+            to clear the metadata. Refer to documentation in
+            https://cloud.google.com/storage/docs/metadata#content-encoding.
+
+            This field is a member of `oneof`_ ``_content_encoding``.
+        content_language (str):
+            Optional. Updates objects' Content-Language fixed
+            metadata. Refer to ISO 639-1 language codes for typical
+            values of this metadata. Max length 100 characters. Unset
+            values will be ignored. Set empty values to clear the
+            metadata. Refer to documentation in
+            https://cloud.google.com/storage/docs/metadata#content-language.
+
+            This field is a member of `oneof`_ ``_content_language``.
+        content_type (str):
+            Optional. Updates objects' Content-Type fixed metadata.
+            Unset values will be ignored. Set empty values to clear
+            the metadata. Refer to documentation in
+            https://cloud.google.com/storage/docs/metadata#content-type.
+
+            This field is a member of `oneof`_ ``_content_type``.
+        cache_control (str):
+            Optional. Updates objects' Cache-Control fixed metadata.
+            Unset values will be ignored. Set empty values to clear
+            the metadata. Additionally, the value for Custom-Time
+            cannot decrease. Refer to documentation in
+            https://cloud.google.com/storage/docs/metadata#caching_data.
+
+            This field is a member of `oneof`_ ``_cache_control``.
+        custom_time (str):
+            Optional. Updates objects' Custom-Time fixed metadata.
+            Unset values will be ignored. Set empty values to clear
+            the metadata. Refer to documentation in
+            https://cloud.google.com/storage/docs/metadata#custom-time.
+
+            This field is a member of `oneof`_ ``_custom_time``.
+        custom_metadata (MutableMapping[str, str]):
+            Optional. Updates objects' custom metadata. Adds or sets
+            individual custom metadata key value pairs on objects.
+            Keys that are set with empty custom metadata values will
+            have their values cleared. Existing custom metadata not
+            specified with this flag is not changed. Refer to
+            documentation in
+            https://cloud.google.com/storage/docs/metadata#custom-metadata
+    """
+
+    content_disposition: str = proto.Field(
+        proto.STRING,
+        number=1,
+        optional=True,
+    )
+    content_encoding: str = proto.Field(
+        proto.STRING,
+        number=2,
+        optional=True,
+    )
+    content_language: str = proto.Field(
+        proto.STRING,
+        number=3,
+        optional=True,
+    )
+    content_type: str = proto.Field(
+        proto.STRING,
+        number=4,
+        optional=True,
+    )
+    cache_control: str = proto.Field(
+        proto.STRING,
+        number=5,
+        optional=True,
+    )
+    custom_time: str = proto.Field(
+        proto.STRING,
+        number=6,
+        optional=True,
+    )
+    custom_metadata: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=7,
+    )
+
+
+class ErrorSummary(proto.Message):
+    r"""A summary of errors by error code, plus a count and sample
+    error log entries.
+
+    Attributes:
+        error_code (google.rpc.code_pb2.Code):
+            Required. The canonical error code.
+        error_count (int):
+            Required. Number of errors encountered per ``error_code``.
+        error_log_entries (MutableSequence[google.cloud.storagebatchoperations_v1.types.ErrorLogEntry]):
+            Required. Sample error logs.
+    """
+
+    error_code: code_pb2.Code = proto.Field(
+        proto.ENUM,
+        number=1,
+        enum=code_pb2.Code,
+    )
+    error_count: int = proto.Field(
+        proto.INT64,
+        number=2,
+    )
+    error_log_entries: MutableSequence['ErrorLogEntry'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=3,
+        message='ErrorLogEntry',
+    )
+
+
+class ErrorLogEntry(proto.Message):
+    r"""An entry describing an error that has occurred.
+
+    Attributes:
+        object_uri (str):
+            Required. Output only. Object URL, e.g.
+            gs://my_bucket/object.txt
+        error_details (MutableSequence[str]):
+            Optional. Output only. At most 5 error log
+            entries are recorded for a given error code for
+            a job.
+    """
+
+    object_uri: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    error_details: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+
+
+class Counters(proto.Message):
+    r"""Describes details about the progress of the job.
+
+    Attributes:
+        total_object_count (int):
+            Output only. Number of objects listed.
+        succeeded_object_count (int):
+            Output only. Number of objects completed.
+        failed_object_count (int):
+            Output only. Number of objects failed.
+    """
+
+    total_object_count: int = proto.Field(
+        proto.INT64,
+        number=1,
+    )
+    succeeded_object_count: int = proto.Field(
+        proto.INT64,
+        number=2,
+    )
+    failed_object_count: int = proto.Field(
+        proto.INT64,
+        number=3,
+    )
+
+
+class LoggingConfig(proto.Message):
+    r"""Specifies the Cloud Logging behavior.
+
+    Attributes:
+        log_actions (MutableSequence[google.cloud.storagebatchoperations_v1.types.LoggingConfig.LoggableAction]):
+            Required. Specifies the actions to be logged.
+        log_action_states (MutableSequence[google.cloud.storagebatchoperations_v1.types.LoggingConfig.LoggableActionState]):
+            Required. States in which actions are logged. If empty,
+            no logs are generated.
+    """
+    class LoggableAction(proto.Enum):
+        r"""Loggable actions types.
+
+        Values:
+            LOGGABLE_ACTION_UNSPECIFIED (0):
+                Illegal value, to avoid allowing a default.
+            TRANSFORM (6):
+                The corresponding transform action in this
+                job.
+        """
+        LOGGABLE_ACTION_UNSPECIFIED = 0
+        TRANSFORM = 6
+
+    class LoggableActionState(proto.Enum):
+        r"""Loggable action states filter.
+
+        Values:
+            LOGGABLE_ACTION_STATE_UNSPECIFIED (0):
+                Illegal value, to avoid allowing a default.
+            SUCCEEDED (1):
+                ``LoggableAction`` completed successfully.
``SUCCEEDED`` + actions are logged as + [INFO][google.logging.type.LogSeverity.INFO]. + FAILED (2): + ``LoggableAction`` terminated in an error state. ``FAILED`` + actions are logged as + [ERROR][google.logging.type.LogSeverity.ERROR]. + """ + LOGGABLE_ACTION_STATE_UNSPECIFIED = 0 + SUCCEEDED = 1 + FAILED = 2 + + log_actions: MutableSequence[LoggableAction] = proto.RepeatedField( + proto.ENUM, + number=1, + enum=LoggableAction, + ) + log_action_states: MutableSequence[LoggableActionState] = proto.RepeatedField( + proto.ENUM, + number=2, + enum=LoggableActionState, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/mypy.ini b/owl-bot-staging/google-cloud-storagebatchoperations/v1/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/noxfile.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/noxfile.py new file mode 100644 index 000000000000..69439bb62866 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/noxfile.py @@ -0,0 +1,591 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import pathlib +import re +import shutil + +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", +] + +DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = "google-cloud-storagebatchoperations" + +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + "mypy", + "types-requests", + "types-protobuf", + ) + session.install(".") + session.run( + "mypy", + "-p", + "google", + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "update", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "check", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. 
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("setuptools", "docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=ALL_PYTHON) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. 
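+ # '--cov-append' accumulates coverage data across the parametrized runs, and + # '--cov-fail-under=0' disables the threshold check here; the real threshold + # is enforced later by the 'cover' session via '--fail-under=100'.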
+ session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + session.install("--pre", "grpcio") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python="3.10") +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
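+ # (Later sphinxcontrib-* releases declare a Sphinx>=5 requirement, so they + # cannot be installed alongside the sphinx==4.5.0 pin below.)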
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.10") +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-release versions can be installed using + `pip install --pre `. + """ + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. 
+ session.install(*constraints_deps) + + # Note: If a dependency is added to the `prerel_deps` list, + # the `core_dependencies_from_source` list in the `core_deps_from_source` + # nox session should also be updated. + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + "grpc-google-iam-v1", + "grpcio", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--ignore-installed", dep) + # TODO(https://github.com/grpc/grpc/issues/38965): Add `grpcio-status`` + # to the dictionary below once this bug is fixed. + # TODO(https://github.com/googleapis/google-cloud-python/issues/13643): Add + # `googleapis-common-protos` and `grpc-google-iam-v1` to the dictionary below + # once this bug is fixed. + package_namespaces = { + "google-api-core": "google.api_core", + "google-auth": "google.auth", + "grpcio": "grpc", + "protobuf": "google.protobuf", + "proto-plus": "proto", + } + + version_namespace = package_namespaces.get(dep) + + print(f"Installed {dep}") + if version_namespace: + session.run( + "python", + "-c", + f"import {version_namespace}; print({version_namespace}.__version__)", + ) + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with core dependencies installed from source + rather than pulling the dependencies from PyPI. + """ + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2358): `grpcio` and + # `grpcio-status` should be added to the list below so that they are installed from source, + # rather than PyPI. + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2357): `protobuf` should be + # added to the list below so that it is installed from source, rather than PyPI + # Note: If a dependency is added to the `core_dependencies_from_source` list, + # the `prerel_deps` list in the `prerelease_deps` nox session should also be updated. 
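+ # Each entry below is a PEP 508 direct reference ('name @ git+URL'); the + # '#egg=...&subdirectory=...' fragments point pip at packages that live + # inside the google-cloud-python monorepo.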
+ core_dependencies_from_source = [ + "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos", + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + "grpc-google-iam-v1 @ git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--no-deps", "--ignore-installed") + print(f"Installed {dep}") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/snippet_metadata_google.cloud.storagebatchoperations.v1.json b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/snippet_metadata_google.cloud.storagebatchoperations.v1.json new file mode 100644 index 000000000000..bcbed4715bdb --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/snippet_metadata_google.cloud.storagebatchoperations.v1.json @@ -0,0 +1,830 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.storagebatchoperations.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-storagebatchoperations", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient", + "shortName": "StorageBatchOperationsAsyncClient" + }, + "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient.cancel_job", + "method": { + "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations.CancelJob", + "service": { + "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "shortName": "StorageBatchOperations" + }, + "shortName": "CancelJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.storagebatchoperations_v1.types.CancelJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.storagebatchoperations_v1.types.CancelJobResponse", + "shortName": "cancel_job" + }, + "description": "Sample for CancelJob", + "file": "storagebatchoperations_v1_generated_storage_batch_operations_cancel_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "storagebatchoperations_v1_generated_StorageBatchOperations_CancelJob_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "storagebatchoperations_v1_generated_storage_batch_operations_cancel_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient", + "shortName": "StorageBatchOperationsClient" + }, + "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.cancel_job", + "method": { + "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations.CancelJob", + "service": { + "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "shortName": "StorageBatchOperations" + }, + "shortName": "CancelJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.storagebatchoperations_v1.types.CancelJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.storagebatchoperations_v1.types.CancelJobResponse", + "shortName": "cancel_job" + }, + "description": "Sample for CancelJob", + "file": "storagebatchoperations_v1_generated_storage_batch_operations_cancel_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "storagebatchoperations_v1_generated_StorageBatchOperations_CancelJob_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "storagebatchoperations_v1_generated_storage_batch_operations_cancel_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient", + "shortName": "StorageBatchOperationsAsyncClient" + }, + "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient.create_job", + "method": { + "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations.CreateJob", + "service": { + "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "shortName": "StorageBatchOperations" + }, + "shortName": "CreateJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.storagebatchoperations_v1.types.CreateJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "job", + "type": "google.cloud.storagebatchoperations_v1.types.Job" + }, + { + "name": "job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_job" + }, + "description": "Sample for CreateJob", + "file": "storagebatchoperations_v1_generated_storage_batch_operations_create_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "storagebatchoperations_v1_generated_StorageBatchOperations_CreateJob_async", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "storagebatchoperations_v1_generated_storage_batch_operations_create_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient", + "shortName": "StorageBatchOperationsClient" + }, + "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.create_job", + "method": { + "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations.CreateJob", + "service": { + "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "shortName": "StorageBatchOperations" + }, + "shortName": "CreateJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.storagebatchoperations_v1.types.CreateJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "job", + "type": "google.cloud.storagebatchoperations_v1.types.Job" + }, + { + "name": "job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_job" + }, + "description": "Sample for CreateJob", + "file": "storagebatchoperations_v1_generated_storage_batch_operations_create_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "storagebatchoperations_v1_generated_StorageBatchOperations_CreateJob_sync", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "storagebatchoperations_v1_generated_storage_batch_operations_create_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient", + "shortName": "StorageBatchOperationsAsyncClient" + }, + "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient.delete_job", + "method": { + "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations.DeleteJob", + "service": { + "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "shortName": "StorageBatchOperations" + }, + "shortName": "DeleteJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.storagebatchoperations_v1.types.DeleteJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_job" + }, + "description": "Sample for DeleteJob", + "file": "storagebatchoperations_v1_generated_storage_batch_operations_delete_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "storagebatchoperations_v1_generated_StorageBatchOperations_DeleteJob_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 
40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "storagebatchoperations_v1_generated_storage_batch_operations_delete_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient", + "shortName": "StorageBatchOperationsClient" + }, + "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.delete_job", + "method": { + "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations.DeleteJob", + "service": { + "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "shortName": "StorageBatchOperations" + }, + "shortName": "DeleteJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.storagebatchoperations_v1.types.DeleteJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_job" + }, + "description": "Sample for DeleteJob", + "file": "storagebatchoperations_v1_generated_storage_batch_operations_delete_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "storagebatchoperations_v1_generated_StorageBatchOperations_DeleteJob_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "storagebatchoperations_v1_generated_storage_batch_operations_delete_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient", + "shortName": "StorageBatchOperationsAsyncClient" + }, + "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient.get_job", + "method": { + "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations.GetJob", + "service": { + "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "shortName": "StorageBatchOperations" + }, + "shortName": "GetJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.storagebatchoperations_v1.types.GetJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.storagebatchoperations_v1.types.Job", + "shortName": "get_job" + }, + "description": "Sample for GetJob", + "file": "storagebatchoperations_v1_generated_storage_batch_operations_get_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "storagebatchoperations_v1_generated_StorageBatchOperations_GetJob_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "storagebatchoperations_v1_generated_storage_batch_operations_get_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient", + "shortName": "StorageBatchOperationsClient" + }, + "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.get_job", + "method": { + "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations.GetJob", + "service": { + "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "shortName": "StorageBatchOperations" + }, + "shortName": "GetJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.storagebatchoperations_v1.types.GetJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.storagebatchoperations_v1.types.Job", + "shortName": "get_job" + }, + "description": "Sample for GetJob", + "file": "storagebatchoperations_v1_generated_storage_batch_operations_get_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "storagebatchoperations_v1_generated_StorageBatchOperations_GetJob_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "storagebatchoperations_v1_generated_storage_batch_operations_get_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient", + "shortName": "StorageBatchOperationsAsyncClient" + }, + "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient.list_jobs", + "method": { + "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations.ListJobs", + "service": { + "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "shortName": "StorageBatchOperations" + }, + "shortName": "ListJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.storagebatchoperations_v1.types.ListJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.storagebatchoperations_v1.services.storage_batch_operations.pagers.ListJobsAsyncPager", + "shortName": "list_jobs" + }, + "description": "Sample for ListJobs", + "file": "storagebatchoperations_v1_generated_storage_batch_operations_list_jobs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "storagebatchoperations_v1_generated_StorageBatchOperations_ListJobs_async", + "segments": [ + { + "end": 52, + 
"start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "storagebatchoperations_v1_generated_storage_batch_operations_list_jobs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient", + "shortName": "StorageBatchOperationsClient" + }, + "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.list_jobs", + "method": { + "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations.ListJobs", + "service": { + "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", + "shortName": "StorageBatchOperations" + }, + "shortName": "ListJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.storagebatchoperations_v1.types.ListJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.storagebatchoperations_v1.services.storage_batch_operations.pagers.ListJobsPager", + "shortName": "list_jobs" + }, + "description": "Sample for ListJobs", + "file": "storagebatchoperations_v1_generated_storage_batch_operations_list_jobs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "storagebatchoperations_v1_generated_StorageBatchOperations_ListJobs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "storagebatchoperations_v1_generated_storage_batch_operations_list_jobs_sync.py" + } + ] +} diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_cancel_job_async.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_cancel_job_async.py new file mode 100644 index 000000000000..783ed99d0ee2 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_cancel_job_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for CancelJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-storagebatchoperations + + +# [START storagebatchoperations_v1_generated_StorageBatchOperations_CancelJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import storagebatchoperations_v1 + + +async def sample_cancel_job(): + # Create a client + client = storagebatchoperations_v1.StorageBatchOperationsAsyncClient() + + # Initialize request argument(s) + request = storagebatchoperations_v1.CancelJobRequest( + name="name_value", + ) + + # Make the request + response = await client.cancel_job(request=request) + + # Handle the response + print(response) + +# [END storagebatchoperations_v1_generated_StorageBatchOperations_CancelJob_async] diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_cancel_job_sync.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_cancel_job_sync.py new file mode 100644 index 000000000000..465b54222faa --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_cancel_job_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-storagebatchoperations + + +# [START storagebatchoperations_v1_generated_StorageBatchOperations_CancelJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
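+# - 'name_value' is a placeholder: the job name field expects a fully +# qualified resource name, typically of the form +# 'projects/{project}/locations/{location}/jobs/{job}'.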
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import storagebatchoperations_v1 + + +def sample_cancel_job(): + # Create a client + client = storagebatchoperations_v1.StorageBatchOperationsClient() + + # Initialize request argument(s) + request = storagebatchoperations_v1.CancelJobRequest( + name="name_value", + ) + + # Make the request + response = client.cancel_job(request=request) + + # Handle the response + print(response) + +# [END storagebatchoperations_v1_generated_StorageBatchOperations_CancelJob_sync] diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_create_job_async.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_create_job_async.py new file mode 100644 index 000000000000..75b9e4cabe92 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_create_job_async.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-storagebatchoperations + + +# [START storagebatchoperations_v1_generated_StorageBatchOperations_CreateJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
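+# - 'parent_value' is a placeholder: the parent field expects a location +# resource, typically of the form 'projects/{project}/locations/{location}'.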
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import storagebatchoperations_v1 + + +async def sample_create_job(): + # Create a client + client = storagebatchoperations_v1.StorageBatchOperationsAsyncClient() + + # Initialize request argument(s) + job = storagebatchoperations_v1.Job() + job.bucket_list.buckets.bucket = "bucket_value" + job.put_object_hold.temporary_hold = "UNSET" + job.put_object_hold.event_based_hold = "UNSET" + + request = storagebatchoperations_v1.CreateJobRequest( + parent="parent_value", + job_id="job_id_value", + job=job, + ) + + # Make the request (the async client method must be awaited) + operation = await client.create_job(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END storagebatchoperations_v1_generated_StorageBatchOperations_CreateJob_async] diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_create_job_sync.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_create_job_sync.py new file mode 100644 index 000000000000..25f1cbfb49f8 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_create_job_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-storagebatchoperations + + +# [START storagebatchoperations_v1_generated_StorageBatchOperations_CreateJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
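+# - create_job returns a long-running operation; calling operation.result() +# blocks until the operation completes.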
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import storagebatchoperations_v1 + + +def sample_create_job(): + # Create a client + client = storagebatchoperations_v1.StorageBatchOperationsClient() + + # Initialize request argument(s) + job = storagebatchoperations_v1.Job() + job.bucket_list.buckets.bucket = "bucket_value" + job.put_object_hold.temporary_hold = "UNSET" + job.put_object_hold.event_based_hold = "UNSET" + + request = storagebatchoperations_v1.CreateJobRequest( + parent="parent_value", + job_id="job_id_value", + job=job, + ) + + # Make the request + operation = client.create_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END storagebatchoperations_v1_generated_StorageBatchOperations_CreateJob_sync] diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_delete_job_async.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_delete_job_async.py new file mode 100644 index 000000000000..1a4115e07be4 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_delete_job_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-storagebatchoperations + + +# [START storagebatchoperations_v1_generated_StorageBatchOperations_DeleteJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
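+# - DeleteJob returns an empty response, so there is no response object to +# print once the call completes.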
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import storagebatchoperations_v1 + + +async def sample_delete_job(): + # Create a client + client = storagebatchoperations_v1.StorageBatchOperationsAsyncClient() + + # Initialize request argument(s) + request = storagebatchoperations_v1.DeleteJobRequest( + name="name_value", + ) + + # Make the request + await client.delete_job(request=request) + + +# [END storagebatchoperations_v1_generated_StorageBatchOperations_DeleteJob_async] diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_delete_job_sync.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_delete_job_sync.py new file mode 100644 index 000000000000..aa2c978f56ba --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_delete_job_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-storagebatchoperations + + +# [START storagebatchoperations_v1_generated_StorageBatchOperations_DeleteJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import storagebatchoperations_v1 + + +def sample_delete_job(): + # Create a client + client = storagebatchoperations_v1.StorageBatchOperationsClient() + + # Initialize request argument(s) + request = storagebatchoperations_v1.DeleteJobRequest( + name="name_value", + ) + + # Make the request + client.delete_job(request=request) + + +# [END storagebatchoperations_v1_generated_StorageBatchOperations_DeleteJob_sync] diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_get_job_async.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_get_job_async.py new file mode 100644 index 000000000000..49d19bee99ac --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_get_job_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-storagebatchoperations + + +# [START storagebatchoperations_v1_generated_StorageBatchOperations_GetJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import storagebatchoperations_v1 + + +async def sample_get_job(): + # Create a client + client = storagebatchoperations_v1.StorageBatchOperationsAsyncClient() + + # Initialize request argument(s) + request = storagebatchoperations_v1.GetJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_job(request=request) + + # Handle the response + print(response) + +# [END storagebatchoperations_v1_generated_StorageBatchOperations_GetJob_async] diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_get_job_sync.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_get_job_sync.py new file mode 100644 index 000000000000..82a981aae6a6 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_get_job_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-storagebatchoperations + + +# [START storagebatchoperations_v1_generated_StorageBatchOperations_GetJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import storagebatchoperations_v1 + + +def sample_get_job(): + # Create a client + client = storagebatchoperations_v1.StorageBatchOperationsClient() + + # Initialize request argument(s) + request = storagebatchoperations_v1.GetJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_job(request=request) + + # Handle the response + print(response) + +# [END storagebatchoperations_v1_generated_StorageBatchOperations_GetJob_sync] diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_list_jobs_async.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_list_jobs_async.py new file mode 100644 index 000000000000..e8e67403df1e --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_list_jobs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-storagebatchoperations + + +# [START storagebatchoperations_v1_generated_StorageBatchOperations_ListJobs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import storagebatchoperations_v1 + + +async def sample_list_jobs(): + # Create a client + client = storagebatchoperations_v1.StorageBatchOperationsAsyncClient() + + # Initialize request argument(s) + request = storagebatchoperations_v1.ListJobsRequest( + parent="parent_value", + ) + + # Make the request (awaiting the async client method yields the pager) + page_result = await client.list_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END storagebatchoperations_v1_generated_StorageBatchOperations_ListJobs_async] diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_list_jobs_sync.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_list_jobs_sync.py new file mode 100644 index 000000000000..3c5c90f47bef --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_list_jobs_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-storagebatchoperations + + +# [START storagebatchoperations_v1_generated_StorageBatchOperations_ListJobs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
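+# - The pager returned by list_jobs fetches additional pages transparently as +# you iterate; page_size, page_token, and order_by on ListJobsRequest control +# the paging behavior.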
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import storagebatchoperations_v1 + + +def sample_list_jobs(): + # Create a client + client = storagebatchoperations_v1.StorageBatchOperationsClient() + + # Initialize request argument(s) + request = storagebatchoperations_v1.ListJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END storagebatchoperations_v1_generated_StorageBatchOperations_ListJobs_sync] diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/scripts/fixup_storagebatchoperations_v1_keywords.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/scripts/fixup_storagebatchoperations_v1_keywords.py new file mode 100644 index 000000000000..598664166809 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/scripts/fixup_storagebatchoperations_v1_keywords.py @@ -0,0 +1,180 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class storagebatchoperationsCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'cancel_job': ('name', 'request_id', ), + 'create_job': ('parent', 'job_id', 'job', 'request_id', ), + 'delete_job': ('name', 'request_id', ), + 'get_job': ('name', ), + 'list_jobs': ('parent', 'filter', 'page_size', 'page_token', 'order_by', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+            return updated
+
+        kwargs, ctrl_kwargs = partition(
+            lambda a: a.keyword.value not in self.CTRL_PARAMS,
+            kwargs
+        )
+
+        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
+        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
+                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
+
+        request_arg = cst.Arg(
+            value=cst.Dict([
+                cst.DictElement(
+                    cst.SimpleString("'{}'".format(name)),
+                    cst.Element(value=arg.value)
+                )
+                # Note: the args + kwargs looks silly, but keep in mind that
+                # the control parameters had to be stripped out, and that
+                # those could have been passed positionally or by keyword.
+                for name, arg in zip(kword_params, args + kwargs)]),
+            keyword=cst.Name("request")
+        )
+
+        return updated.with_changes(
+            args=[request_arg] + ctrl_kwargs
+        )
+
+
+def fix_files(
+    in_dir: pathlib.Path,
+    out_dir: pathlib.Path,
+    *,
+    transformer=storagebatchoperationsCallTransformer(),
+):
+    """Duplicate the input dir to the output dir, fixing file method calls.
+
+    Preconditions:
+    * in_dir is a real directory
+    * out_dir is a real, empty directory
+    """
+    pyfile_gen = (
+        pathlib.Path(os.path.join(root, f))
+        for root, _, files in os.walk(in_dir)
+        for f in files if os.path.splitext(f)[1] == ".py"
+    )
+
+    for fpath in pyfile_gen:
+        with open(fpath, 'r') as f:
+            src = f.read()
+
+        # Parse the code and insert method call fixes.
+        tree = cst.parse_module(src)
+        updated = tree.visit(transformer)
+
+        # Create the path and directory structure for the new file.
+        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
+        updated_path.parent.mkdir(parents=True, exist_ok=True)
+
+        # Generate the updated source file at the corresponding path.
+        with open(updated_path, 'w') as f:
+            f.write(updated.code)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description="""Fix up source that uses the storagebatchoperations client library.
+
+The existing sources are NOT overwritten but are copied to output_dir with changes made.
+
+Note: This tool operates at a best-effort level at converting positional
+      parameters in client method calls to keyword based parameters.
+      Cases where it WILL FAIL include
+      A) * or ** expansion in a method call.
+      B) Calls via function or method alias (includes free function calls)
+      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
+
+      These all constitute false negatives. The tool will also detect false
+      positives when an API method shares a name with another method.
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/setup.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/setup.py new file mode 100644 index 000000000000..69c4a0db82f5 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/setup.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-storagebatchoperations' + + +description = "Google Cloud Storagebatchoperations API client library" + +version = None + +with open(os.path.join(package_root, 'google/cloud/storagebatchoperations/gapic_version.py')) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert (len(version_candidates) == 1) + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0", + "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", + "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +extras = { +} +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storagebatchoperations" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + extras_require=extras, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.13.txt new file mode 100644 index 000000000000..c20a77817caa --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.13.txt @@ -0,0 +1,11 @@ +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. +# List all library dependencies and extras in this file. +# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +proto-plus>=1 +protobuf>=6 diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.7.txt new file mode 100644 index 000000000000..a77f12bc13e4 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.20.2 diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/__init__.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/__init__.py new file mode 100644 index 000000000000..191773d5572d --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/__init__.py new file mode 100644 index 000000000000..191773d5572d --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..191773d5572d --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/gapic/storagebatchoperations_v1/__init__.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/gapic/storagebatchoperations_v1/__init__.py new file mode 100644 index 000000000000..191773d5572d --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/gapic/storagebatchoperations_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/gapic/storagebatchoperations_v1/test_storage_batch_operations.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/gapic/storagebatchoperations_v1/test_storage_batch_operations.py new file mode 100644 index 000000000000..53c7521b6234 --- /dev/null +++ b/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/gapic/storagebatchoperations_v1/test_storage_batch_operations.py @@ -0,0 +1,6318 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable, AsyncIterable +from google.protobuf import json_format +import json +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.cloud.storagebatchoperations_v1.services.storage_batch_operations import StorageBatchOperationsAsyncClient +from google.cloud.storagebatchoperations_v1.services.storage_batch_operations import StorageBatchOperationsClient +from google.cloud.storagebatchoperations_v1.services.storage_batch_operations import pagers +from google.cloud.storagebatchoperations_v1.services.storage_batch_operations import transports +from google.cloud.storagebatchoperations_v1.types import storage_batch_operations +from google.cloud.storagebatchoperations_v1.types import storage_batch_operations_types +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import code_pb2 # type: ignore +import google.auth + + + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
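+# (For instance, "storagebatchoperations.googleapis.com" would normally map to the
+# mTLS endpoint "storagebatchoperations.mtls.googleapis.com", so a localhost test
+# endpoint is swapped for "foo.googleapis.com" to keep the two distinguishable.)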
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert StorageBatchOperationsClient._get_default_mtls_endpoint(None) is None + assert StorageBatchOperationsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert StorageBatchOperationsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert StorageBatchOperationsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert StorageBatchOperationsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert StorageBatchOperationsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert StorageBatchOperationsClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert StorageBatchOperationsClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert StorageBatchOperationsClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + StorageBatchOperationsClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert StorageBatchOperationsClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert StorageBatchOperationsClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert StorageBatchOperationsClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + StorageBatchOperationsClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert StorageBatchOperationsClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert StorageBatchOperationsClient._get_client_cert_source(None, False) is None + assert 
StorageBatchOperationsClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert StorageBatchOperationsClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert StorageBatchOperationsClient._get_client_cert_source(None, True) is mock_default_cert_source + assert StorageBatchOperationsClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(StorageBatchOperationsClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(StorageBatchOperationsClient)) +@mock.patch.object(StorageBatchOperationsAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(StorageBatchOperationsAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = StorageBatchOperationsClient._DEFAULT_UNIVERSE + default_endpoint = StorageBatchOperationsClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = StorageBatchOperationsClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert StorageBatchOperationsClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert StorageBatchOperationsClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == StorageBatchOperationsClient.DEFAULT_MTLS_ENDPOINT + assert StorageBatchOperationsClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert StorageBatchOperationsClient._get_api_endpoint(None, None, default_universe, "always") == StorageBatchOperationsClient.DEFAULT_MTLS_ENDPOINT + assert StorageBatchOperationsClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == StorageBatchOperationsClient.DEFAULT_MTLS_ENDPOINT + assert StorageBatchOperationsClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert StorageBatchOperationsClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + StorageBatchOperationsClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert StorageBatchOperationsClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert StorageBatchOperationsClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert StorageBatchOperationsClient._get_universe_domain(None, None) == StorageBatchOperationsClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + StorageBatchOperationsClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
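The precedence asserted above is: an explicit client option wins, then the GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variable, then the built-in default universe. A minimal standalone sketch of that resolution order (resolve_universe_domain is a hypothetical helper written for illustration, not the client's internal implementation):

    import os
    from typing import Optional

    _DEFAULT_UNIVERSE = "googleapis.com"

    def resolve_universe_domain(client_option: Optional[str] = None) -> str:
        # Highest precedence: a universe domain supplied via ClientOptions.
        if client_option is not None:
            if not client_option:
                raise ValueError("Universe Domain cannot be an empty string.")
            return client_option
        # Next: the GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variable.
        env_value = os.environ.get("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
        if env_value is not None:
            return env_value
        # Fallback: the default Google universe.
        return _DEFAULT_UNIVERSE

    assert resolve_universe_domain("foo.com") == "foo.com"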
+ +@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False) +]) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = StorageBatchOperationsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + +@pytest.mark.parametrize("error_code", [401,403,404,500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = StorageBatchOperationsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + +@pytest.mark.parametrize("client_class,transport_name", [ + (StorageBatchOperationsClient, "grpc"), + (StorageBatchOperationsAsyncClient, "grpc_asyncio"), + (StorageBatchOperationsClient, "rest"), +]) +def test_storage_batch_operations_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'storagebatchoperations.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://storagebatchoperations.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.StorageBatchOperationsGrpcTransport, "grpc"), + (transports.StorageBatchOperationsGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.StorageBatchOperationsRestTransport, "rest"), +]) +def test_storage_batch_operations_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (StorageBatchOperationsClient, "grpc"), + (StorageBatchOperationsAsyncClient, "grpc_asyncio"), + (StorageBatchOperationsClient, "rest"), +]) +def test_storage_batch_operations_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: 
+ factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'storagebatchoperations.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://storagebatchoperations.googleapis.com' + ) + + +def test_storage_batch_operations_client_get_transport_class(): + transport = StorageBatchOperationsClient.get_transport_class() + available_transports = [ + transports.StorageBatchOperationsGrpcTransport, + transports.StorageBatchOperationsRestTransport, + ] + assert transport in available_transports + + transport = StorageBatchOperationsClient.get_transport_class("grpc") + assert transport == transports.StorageBatchOperationsGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (StorageBatchOperationsClient, transports.StorageBatchOperationsGrpcTransport, "grpc"), + (StorageBatchOperationsAsyncClient, transports.StorageBatchOperationsGrpcAsyncIOTransport, "grpc_asyncio"), + (StorageBatchOperationsClient, transports.StorageBatchOperationsRestTransport, "rest"), +]) +@mock.patch.object(StorageBatchOperationsClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(StorageBatchOperationsClient)) +@mock.patch.object(StorageBatchOperationsAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(StorageBatchOperationsAsyncClient)) +def test_storage_batch_operations_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(StorageBatchOperationsClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(StorageBatchOperationsClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
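+    # ("never" pins the client to the plain endpoint even when client
+    # certificates are available; no mTLS autoswitching takes place.)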
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (StorageBatchOperationsClient, transports.StorageBatchOperationsGrpcTransport, "grpc", "true"), + 
(StorageBatchOperationsAsyncClient, transports.StorageBatchOperationsGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (StorageBatchOperationsClient, transports.StorageBatchOperationsGrpcTransport, "grpc", "false"), + (StorageBatchOperationsAsyncClient, transports.StorageBatchOperationsGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (StorageBatchOperationsClient, transports.StorageBatchOperationsRestTransport, "rest", "true"), + (StorageBatchOperationsClient, transports.StorageBatchOperationsRestTransport, "rest", "false"), +]) +@mock.patch.object(StorageBatchOperationsClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(StorageBatchOperationsClient)) +@mock.patch.object(StorageBatchOperationsAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(StorageBatchOperationsAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_storage_batch_operations_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
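+    # ("ADC" is Application Default Credentials; the default client certificate
+    # is the one surfaced by google.auth.transport.mtls.default_client_cert_source().)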
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + StorageBatchOperationsClient, StorageBatchOperationsAsyncClient +]) +@mock.patch.object(StorageBatchOperationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(StorageBatchOperationsClient)) +@mock.patch.object(StorageBatchOperationsAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(StorageBatchOperationsAsyncClient)) +def test_storage_batch_operations_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + StorageBatchOperationsClient, StorageBatchOperationsAsyncClient +]) +@mock.patch.object(StorageBatchOperationsClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(StorageBatchOperationsClient)) +@mock.patch.object(StorageBatchOperationsAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(StorageBatchOperationsAsyncClient)) +def test_storage_batch_operations_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = StorageBatchOperationsClient._DEFAULT_UNIVERSE + default_endpoint = StorageBatchOperationsClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = StorageBatchOperationsClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. 
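+    # (An explicit api_endpoint override takes precedence over both mTLS
+    # autoswitching and universe-domain templating.)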
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (StorageBatchOperationsClient, transports.StorageBatchOperationsGrpcTransport, "grpc"), + (StorageBatchOperationsAsyncClient, transports.StorageBatchOperationsGrpcAsyncIOTransport, "grpc_asyncio"), + (StorageBatchOperationsClient, transports.StorageBatchOperationsRestTransport, "rest"), +]) +def test_storage_batch_operations_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
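+    # (Scopes passed via ClientOptions are forwarded verbatim to the
+    # transport constructor.)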
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (StorageBatchOperationsClient, transports.StorageBatchOperationsGrpcTransport, "grpc", grpc_helpers), + (StorageBatchOperationsAsyncClient, transports.StorageBatchOperationsGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (StorageBatchOperationsClient, transports.StorageBatchOperationsRestTransport, "rest", None), +]) +def test_storage_batch_operations_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_storage_batch_operations_client_client_options_from_dict(): + with mock.patch('google.cloud.storagebatchoperations_v1.services.storage_batch_operations.transports.StorageBatchOperationsGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = StorageBatchOperationsClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (StorageBatchOperationsClient, transports.StorageBatchOperationsGrpcTransport, "grpc", grpc_helpers), + (StorageBatchOperationsAsyncClient, transports.StorageBatchOperationsGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_storage_batch_operations_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
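+    # (credentials_file is forwarded to the transport, which loads the
+    # credentials itself via google.auth.load_credentials_from_file.)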
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "storagebatchoperations.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="storagebatchoperations.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + storage_batch_operations.ListJobsRequest, + dict, +]) +def test_list_jobs(request_type, transport: str = 'grpc'): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = storage_batch_operations.ListJobsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = storage_batch_operations.ListJobsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_jobs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
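+    # (Per AIP-4235, string fields annotated as auto-populated UUID4 are
+    # filled in with a client-generated UUID when left unset.)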
+ request = storage_batch_operations.ListJobsRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_jobs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == storage_batch_operations.ListJobsRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + order_by='order_by_value', + ) + +def test_list_jobs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_jobs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_jobs] = mock_rpc + request = {} + client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_jobs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_jobs in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_jobs] = mock_rpc + + request = {} + await client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_jobs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_jobs_async(transport: str = 'grpc_asyncio', request_type=storage_batch_operations.ListJobsRequest): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
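+    # The mocked stub returns a grpc_helpers_async.FakeUnaryUnaryCall below so
+    # that the response can be awaited exactly like a real unary-unary call.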
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_jobs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(storage_batch_operations.ListJobsResponse(
+            next_page_token='next_page_token_value',
+            unreachable=['unreachable_value'],
+        ))
+        response = await client.list_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = storage_batch_operations.ListJobsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListJobsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.unreachable == ['unreachable_value']
+
+
+@pytest.mark.asyncio
+async def test_list_jobs_async_from_dict():
+    await test_list_jobs_async(request_type=dict)
+
+def test_list_jobs_field_headers():
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = storage_batch_operations.ListJobsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_jobs),
+            '__call__') as call:
+        call.return_value = storage_batch_operations.ListJobsResponse()
+        client.list_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_jobs_field_headers_async():
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = storage_batch_operations.ListJobsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_jobs),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(storage_batch_operations.ListJobsResponse())
+        await client.list_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_jobs_flattened():
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_jobs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = storage_batch_operations.ListJobsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_jobs(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_jobs_flattened_error():
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_jobs(
+            storage_batch_operations.ListJobsRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_jobs_flattened_async():
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_jobs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(storage_batch_operations.ListJobsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_jobs(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_jobs_flattened_error_async():
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_jobs(
+            storage_batch_operations.ListJobsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_jobs_pager(transport_name: str = "grpc"):
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_jobs),
+            '__call__') as call:
+        # Set the response to a series of pages.
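+        # mock's side_effect yields one element per successive stub call; the
+        # trailing RuntimeError makes the test fail loudly if the pager ever
+        # requests a fifth page beyond the four defined here.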
+        call.side_effect = (
+            storage_batch_operations.ListJobsResponse(
+                jobs=[
+                    storage_batch_operations_types.Job(),
+                    storage_batch_operations_types.Job(),
+                    storage_batch_operations_types.Job(),
+                ],
+                next_page_token='abc',
+            ),
+            storage_batch_operations.ListJobsResponse(
+                jobs=[],
+                next_page_token='def',
+            ),
+            storage_batch_operations.ListJobsResponse(
+                jobs=[
+                    storage_batch_operations_types.Job(),
+                ],
+                next_page_token='ghi',
+            ),
+            storage_batch_operations.ListJobsResponse(
+                jobs=[
+                    storage_batch_operations_types.Job(),
+                    storage_batch_operations_types.Job(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_jobs(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, storage_batch_operations_types.Job)
+                   for i in results)
+
+def test_list_jobs_pages(transport_name: str = "grpc"):
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_jobs),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            storage_batch_operations.ListJobsResponse(
+                jobs=[
+                    storage_batch_operations_types.Job(),
+                    storage_batch_operations_types.Job(),
+                    storage_batch_operations_types.Job(),
+                ],
+                next_page_token='abc',
+            ),
+            storage_batch_operations.ListJobsResponse(
+                jobs=[],
+                next_page_token='def',
+            ),
+            storage_batch_operations.ListJobsResponse(
+                jobs=[
+                    storage_batch_operations_types.Job(),
+                ],
+                next_page_token='ghi',
+            ),
+            storage_batch_operations.ListJobsResponse(
+                jobs=[
+                    storage_batch_operations_types.Job(),
+                    storage_batch_operations_types.Job(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_jobs(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_jobs_async_pager():
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_jobs),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + storage_batch_operations.ListJobsResponse( + jobs=[ + storage_batch_operations_types.Job(), + storage_batch_operations_types.Job(), + storage_batch_operations_types.Job(), + ], + next_page_token='abc', + ), + storage_batch_operations.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + storage_batch_operations.ListJobsResponse( + jobs=[ + storage_batch_operations_types.Job(), + ], + next_page_token='ghi', + ), + storage_batch_operations.ListJobsResponse( + jobs=[ + storage_batch_operations_types.Job(), + storage_batch_operations_types.Job(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_jobs(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, storage_batch_operations_types.Job) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_jobs_async_pages(): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + storage_batch_operations.ListJobsResponse( + jobs=[ + storage_batch_operations_types.Job(), + storage_batch_operations_types.Job(), + storage_batch_operations_types.Job(), + ], + next_page_token='abc', + ), + storage_batch_operations.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + storage_batch_operations.ListJobsResponse( + jobs=[ + storage_batch_operations_types.Job(), + ], + next_page_token='ghi', + ), + storage_batch_operations.ListJobsResponse( + jobs=[ + storage_batch_operations_types.Job(), + storage_batch_operations_types.Job(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_jobs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + storage_batch_operations.GetJobRequest, + dict, +]) +def test_get_job(request_type, transport: str = 'grpc'): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = storage_batch_operations_types.Job( + name='name_value', + description='description_value', + state=storage_batch_operations_types.Job.State.RUNNING, + ) + response = client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = storage_batch_operations.GetJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, storage_batch_operations_types.Job) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.state == storage_batch_operations_types.Job.State.RUNNING + + +def test_get_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = storage_batch_operations.GetJobRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == storage_batch_operations.GetJobRequest( + name='name_value', + ) + +def test_get_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_job] = mock_rpc + request = {} + client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_job in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_job] = mock_rpc + + request = {} + await client.get_job(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        await client.get_job(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_job_async(transport: str = 'grpc_asyncio', request_type=storage_batch_operations.GetJobRequest):
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(storage_batch_operations_types.Job(
+            name='name_value',
+            description='description_value',
+            state=storage_batch_operations_types.Job.State.RUNNING,
+        ))
+        response = await client.get_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = storage_batch_operations.GetJobRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, storage_batch_operations_types.Job)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.state == storage_batch_operations_types.Job.State.RUNNING
+
+
+@pytest.mark.asyncio
+async def test_get_job_async_from_dict():
+    await test_get_job_async(request_type=dict)
+
+def test_get_job_field_headers():
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = storage_batch_operations.GetJobRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_job),
+            '__call__') as call:
+        call.return_value = storage_batch_operations_types.Job()
+        client.get_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_job_field_headers_async():
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = storage_batch_operations.GetJobRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_job),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(storage_batch_operations_types.Job())
+        await client.get_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_get_job_flattened():
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = storage_batch_operations_types.Job()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_job(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_job_flattened_error():
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_job(
+            storage_batch_operations.GetJobRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_job_flattened_async():
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(storage_batch_operations_types.Job())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_job(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_job_flattened_error_async():
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_job(
+            storage_batch_operations.GetJobRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    storage_batch_operations.CreateJobRequest,
+    dict,
+])
+def test_create_job(request_type, transport: str = 'grpc'):
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.create_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
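+        # create_job is a long-running operation: the stub returns a raw
+        # operations_pb2.Operation, which the client wraps in an operation
+        # future (hence the future.Future assertion below).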
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = storage_batch_operations.CreateJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = storage_batch_operations.CreateJobRequest( + parent='parent_value', + job_id='job_id_value', + request_id='request_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == storage_batch_operations.CreateJobRequest( + parent='parent_value', + job_id='job_id_value', + request_id='request_id_value', + ) + +def test_create_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_job] = mock_rpc + request = {} + client.create_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_job in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_job] = mock_rpc + + request = {} + await client.create_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_job_async(transport: str = 'grpc_asyncio', request_type=storage_batch_operations.CreateJobRequest): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = storage_batch_operations.CreateJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_job_async_from_dict(): + await test_create_job_async(request_type=dict) + +def test_create_job_field_headers(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = storage_batch_operations.CreateJobRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
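+    # The client is expected to copy request.parent into the
+    # 'x-goog-request-params' metadata entry asserted at the end of this test.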
+ with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_job_field_headers_async(): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = storage_batch_operations.CreateJobRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_job_flattened(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_job( + parent='parent_value', + job=storage_batch_operations_types.Job(name='name_value'), + job_id='job_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].job + mock_val = storage_batch_operations_types.Job(name='name_value') + assert arg == mock_val + arg = args[0].job_id + mock_val = 'job_id_value' + assert arg == mock_val + + +def test_create_job_flattened_error(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_job( + storage_batch_operations.CreateJobRequest(), + parent='parent_value', + job=storage_batch_operations_types.Job(name='name_value'), + job_id='job_id_value', + ) + +@pytest.mark.asyncio +async def test_create_job_flattened_async(): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_job(
+            parent='parent_value',
+            job=storage_batch_operations_types.Job(name='name_value'),
+            job_id='job_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].job
+        mock_val = storage_batch_operations_types.Job(name='name_value')
+        assert arg == mock_val
+        arg = args[0].job_id
+        mock_val = 'job_id_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_job_flattened_error_async():
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_job(
+            storage_batch_operations.CreateJobRequest(),
+            parent='parent_value',
+            job=storage_batch_operations_types.Job(name='name_value'),
+            job_id='job_id_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    storage_batch_operations.DeleteJobRequest,
+    dict,
+])
+def test_delete_job(request_type, transport: str = 'grpc'):
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.delete_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = storage_batch_operations.DeleteJobRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_delete_job_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = storage_batch_operations.DeleteJobRequest(
+        name='name_value',
+        request_id='request_id_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_job),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client.delete_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == storage_batch_operations.DeleteJobRequest( + name='name_value', + request_id='request_id_value', + ) + +def test_delete_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_job] = mock_rpc + request = {} + client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_job in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_job] = mock_rpc + + request = {} + await client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_job_async(transport: str = 'grpc_asyncio', request_type=storage_batch_operations.DeleteJobRequest): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. 
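+        # DeleteJob has an empty response message, so the awaited call is
+        # expected to resolve to None rather than to a response object.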
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = storage_batch_operations.DeleteJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_job_async_from_dict(): + await test_delete_job_async(request_type=dict) + +def test_delete_job_field_headers(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = storage_batch_operations.DeleteJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job), + '__call__') as call: + call.return_value = None + client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_job_field_headers_async(): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = storage_batch_operations.DeleteJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_job_flattened(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_job_flattened_error(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.delete_job(
+            storage_batch_operations.DeleteJobRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_delete_job_flattened_async():
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_job(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_job_flattened_error_async():
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_job(
+            storage_batch_operations.DeleteJobRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    storage_batch_operations.CancelJobRequest,
+    dict,
+])
+def test_cancel_job(request_type, transport: str = 'grpc'):
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.cancel_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = storage_batch_operations.CancelJobResponse()
+        response = client.cancel_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = storage_batch_operations.CancelJobRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, storage_batch_operations.CancelJobResponse)
+
+
+def test_cancel_job_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = storage_batch_operations.CancelJobRequest(
+        name='name_value',
+        request_id='request_id_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.cancel_job),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.cancel_job(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == storage_batch_operations.CancelJobRequest(
+            name='name_value',
+            request_id='request_id_value',
+        )
+
+def test_cancel_job_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = StorageBatchOperationsClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.cancel_job in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.cancel_job] = mock_rpc
+        request = {}
+        client.cancel_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.cancel_job(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_cancel_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = StorageBatchOperationsAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.cancel_job in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.cancel_job] = mock_rpc
+
+        request = {}
+        await client.cancel_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.cancel_job(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_cancel_job_async(transport: str = 'grpc_asyncio', request_type=storage_batch_operations.CancelJobRequest):
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.cancel_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(storage_batch_operations.CancelJobResponse())
+        response = await client.cancel_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
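+        # CancelJobResponse is built above with no fields set, so the only
+        # meaningful check below is that the response has the expected type.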
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = storage_batch_operations.CancelJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, storage_batch_operations.CancelJobResponse) + + +@pytest.mark.asyncio +async def test_cancel_job_async_from_dict(): + await test_cancel_job_async(request_type=dict) + +def test_cancel_job_field_headers(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = storage_batch_operations.CancelJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_job), + '__call__') as call: + call.return_value = storage_batch_operations.CancelJobResponse() + client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_cancel_job_field_headers_async(): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = storage_batch_operations.CancelJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(storage_batch_operations.CancelJobResponse()) + await client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_cancel_job_flattened(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = storage_batch_operations.CancelJobResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.cancel_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_cancel_job_flattened_error(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.cancel_job(
+            storage_batch_operations.CancelJobRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_cancel_job_flattened_async():
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.cancel_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(storage_batch_operations.CancelJobResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.cancel_job(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_cancel_job_flattened_error_async():
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.cancel_job(
+            storage_batch_operations.CancelJobRequest(),
+            name='name_value',
+        )
+
+
+def test_list_jobs_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = StorageBatchOperationsClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.list_jobs in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.list_jobs] = mock_rpc
+
+        request = {}
+        client.list_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + client.list_jobs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_jobs_rest_required_fields(request_type=storage_batch_operations.ListJobsRequest): + transport_class = transports.StorageBatchOperationsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_jobs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_jobs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = storage_batch_operations.ListJobsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = storage_batch_operations.ListJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_jobs(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_jobs_rest_unset_required_fields(): + transport = transports.StorageBatchOperationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_jobs._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_jobs_rest_flattened(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = storage_batch_operations.ListJobsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = storage_batch_operations.ListJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_jobs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/jobs" % client.transport._host, args[1]) + + +def test_list_jobs_rest_flattened_error(transport: str = 'rest'): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_jobs( + storage_batch_operations.ListJobsRequest(), + parent='parent_value', + ) + + +def test_list_jobs_rest_pager(transport: str = 'rest'): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + storage_batch_operations.ListJobsResponse( + jobs=[ + storage_batch_operations_types.Job(), + storage_batch_operations_types.Job(), + storage_batch_operations_types.Job(), + ], + next_page_token='abc', + ), + storage_batch_operations.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + storage_batch_operations.ListJobsResponse( + jobs=[ + storage_batch_operations_types.Job(), + ], + next_page_token='ghi', + ), + storage_batch_operations.ListJobsResponse( + jobs=[ + storage_batch_operations_types.Job(), + storage_batch_operations_types.Job(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(storage_batch_operations.ListJobsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_jobs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, storage_batch_operations_types.Job) + for i in results) + + pages = list(client.list_jobs(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_get_job_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_job] = mock_rpc + + request = {} + client.get_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_job_rest_required_fields(request_type=storage_batch_operations.GetJobRequest): + transport_class = transports.StorageBatchOperationsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = storage_batch_operations_types.Job() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = storage_batch_operations_types.Job.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_job_rest_unset_required_fields(): + transport = transports.StorageBatchOperationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_job_rest_flattened(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = storage_batch_operations_types.Job() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/jobs/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = storage_batch_operations_types.Job.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/jobs/*}" % client.transport._host, args[1]) + + +def test_get_job_rest_flattened_error(transport: str = 'rest'): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_job( + storage_batch_operations.GetJobRequest(), + name='name_value', + ) + + +def test_create_job_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_job] = mock_rpc + + request = {} + client.create_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_job_rest_required_fields(request_type=storage_batch_operations.CreateJobRequest): + transport_class = transports.StorageBatchOperationsRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["job_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "jobId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "jobId" in jsonified_request + assert jsonified_request["jobId"] == request_init["job_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["jobId"] = 'job_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_job._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("job_id", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "jobId" in jsonified_request + assert jsonified_request["jobId"] == 'job_id_value' + + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_job(request) + + expected_params = [ + ( + "jobId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_job_rest_unset_required_fields(): + transport = transports.StorageBatchOperationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(("jobId", "requestId", )) & set(("parent", "jobId", "job", ))) + + +def test_create_job_rest_flattened(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + job=storage_batch_operations_types.Job(name='name_value'), + job_id='job_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/jobs" % client.transport._host, args[1]) + + +def test_create_job_rest_flattened_error(transport: str = 'rest'): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_job( + storage_batch_operations.CreateJobRequest(), + parent='parent_value', + job=storage_batch_operations_types.Job(name='name_value'), + job_id='job_id_value', + ) + + +def test_delete_job_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_job] = mock_rpc + + request = {} + client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_job_rest_required_fields(request_type=storage_batch_operations.DeleteJobRequest): + transport_class = transports.StorageBatchOperationsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_job._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_job_rest_unset_required_fields(): + transport = transports.StorageBatchOperationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("name", ))) + + +def test_delete_job_rest_flattened(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/jobs/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/jobs/*}" % client.transport._host, args[1]) + + +def test_delete_job_rest_flattened_error(transport: str = 'rest'): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_job( + storage_batch_operations.DeleteJobRequest(), + name='name_value', + ) + + +def test_cancel_job_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.cancel_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport.cancel_job] = mock_rpc + + request = {} + client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.cancel_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_cancel_job_rest_required_fields(request_type=storage_batch_operations.CancelJobRequest): + transport_class = transports.StorageBatchOperationsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = storage_batch_operations.CancelJobResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = storage_batch_operations.CancelJobResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_cancel_job_rest_unset_required_fields(): + transport = transports.StorageBatchOperationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.cancel_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_cancel_job_rest_flattened(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = storage_batch_operations.CancelJobResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/jobs/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = storage_batch_operations.CancelJobResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.cancel_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/jobs/*}:cancel" % client.transport._host, args[1]) + + +def test_cancel_job_rest_flattened_error(transport: str = 'rest'): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.cancel_job( + storage_batch_operations.CancelJobRequest(), + name='name_value', + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.StorageBatchOperationsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
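+    # (A transport instance is already bound to its own credentials, so any
+    # additional credential source would be ambiguous.)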
+ transport = transports.StorageBatchOperationsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = StorageBatchOperationsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.StorageBatchOperationsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = StorageBatchOperationsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = StorageBatchOperationsClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.StorageBatchOperationsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = StorageBatchOperationsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.StorageBatchOperationsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = StorageBatchOperationsClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.StorageBatchOperationsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.StorageBatchOperationsGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.StorageBatchOperationsGrpcTransport, + transports.StorageBatchOperationsGrpcAsyncIOTransport, + transports.StorageBatchOperationsRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = StorageBatchOperationsClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_jobs_empty_call_grpc(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + call.return_value = storage_batch_operations.ListJobsResponse() + client.list_jobs(request=None) + + # Establish that the underlying stub method was called. 
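+        # With request=None the client builds a default ListJobsRequest, which
+        # is the message the stub should have received.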
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = storage_batch_operations.ListJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_job_empty_call_grpc(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + call.return_value = storage_batch_operations_types.Job() + client.get_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = storage_batch_operations.GetJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_job_empty_call_grpc(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = storage_batch_operations.CreateJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_job_empty_call_grpc(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_job), + '__call__') as call: + call.return_value = None + client.delete_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = storage_batch_operations.DeleteJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_cancel_job_empty_call_grpc(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.cancel_job), + '__call__') as call: + call.return_value = storage_batch_operations.CancelJobResponse() + client.cancel_job(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = storage_batch_operations.CancelJobRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = StorageBatchOperationsAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_jobs_empty_call_grpc_asyncio(): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(storage_batch_operations.ListJobsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = storage_batch_operations.ListJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_job_empty_call_grpc_asyncio(): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(storage_batch_operations_types.Job( + name='name_value', + description='description_value', + state=storage_batch_operations_types.Job.State.RUNNING, + )) + await client.get_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = storage_batch_operations.GetJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_job_empty_call_grpc_asyncio(): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = storage_batch_operations.CreateJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_job_empty_call_grpc_asyncio(): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = storage_batch_operations.DeleteJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_cancel_job_empty_call_grpc_asyncio(): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.cancel_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(storage_batch_operations.CancelJobResponse( + )) + await client.cancel_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = storage_batch_operations.CancelJobRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = StorageBatchOperationsClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_jobs_rest_bad_request(request_type=storage_batch_operations.ListJobsRequest): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_jobs(request) + + +@pytest.mark.parametrize("request_type", [ + storage_batch_operations.ListJobsRequest, + dict, +]) +def test_list_jobs_rest_call_success(request_type): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = storage_batch_operations.ListJobsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = storage_batch_operations.ListJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_jobs(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_jobs_rest_interceptors(null_interceptor): + transport = transports.StorageBatchOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.StorageBatchOperationsRestInterceptor(), + ) + client = StorageBatchOperationsClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.StorageBatchOperationsRestInterceptor, "post_list_jobs") as post, \ + mock.patch.object(transports.StorageBatchOperationsRestInterceptor, "post_list_jobs_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.StorageBatchOperationsRestInterceptor, "pre_list_jobs") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = storage_batch_operations.ListJobsRequest.pb(storage_batch_operations.ListJobsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = storage_batch_operations.ListJobsResponse.to_json(storage_batch_operations.ListJobsResponse()) + req.return_value.content = return_value + + request = storage_batch_operations.ListJobsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = storage_batch_operations.ListJobsResponse() + post_with_metadata.return_value = storage_batch_operations.ListJobsResponse(), metadata + + client.list_jobs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_job_rest_bad_request(request_type=storage_batch_operations.GetJobRequest): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/jobs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
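+    # google.api_core maps the mocked 400 status onto core_exceptions.BadRequest.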
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_job(request) + + +@pytest.mark.parametrize("request_type", [ + storage_batch_operations.GetJobRequest, + dict, +]) +def test_get_job_rest_call_success(request_type): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/jobs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = storage_batch_operations_types.Job( + name='name_value', + description='description_value', + state=storage_batch_operations_types.Job.State.RUNNING, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = storage_batch_operations_types.Job.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_job(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, storage_batch_operations_types.Job) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.state == storage_batch_operations_types.Job.State.RUNNING + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_job_rest_interceptors(null_interceptor): + transport = transports.StorageBatchOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.StorageBatchOperationsRestInterceptor(), + ) + client = StorageBatchOperationsClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.StorageBatchOperationsRestInterceptor, "post_get_job") as post, \ + mock.patch.object(transports.StorageBatchOperationsRestInterceptor, "post_get_job_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.StorageBatchOperationsRestInterceptor, "pre_get_job") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = storage_batch_operations.GetJobRequest.pb(storage_batch_operations.GetJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = storage_batch_operations_types.Job.to_json(storage_batch_operations_types.Job()) + req.return_value.content = return_value + + request = storage_batch_operations.GetJobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = storage_batch_operations_types.Job() + post_with_metadata.return_value = storage_batch_operations_types.Job(), metadata + + client.get_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_job_rest_bad_request(request_type=storage_batch_operations.CreateJobRequest): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_job(request) + + +@pytest.mark.parametrize("request_type", [ + storage_batch_operations.CreateJobRequest, + dict, +]) +def test_create_job_rest_call_success(request_type): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["job"] = {'name': 'name_value', 'description': 'description_value', 'bucket_list': {'buckets': [{'bucket': 'bucket_value', 'prefix_list': {'included_object_prefixes': ['included_object_prefixes_value1', 'included_object_prefixes_value2']}, 'manifest': {'manifest_location': 'manifest_location_value'}}]}, 'put_object_hold': {'temporary_hold': 1, 'event_based_hold': 1}, 'delete_object': {'permanent_object_deletion_enabled': True}, 'put_metadata': {'content_disposition': 'content_disposition_value', 'content_encoding': 'content_encoding_value', 'content_language': 'content_language_value', 'content_type': 'content_type_value', 'cache_control': 'cache_control_value', 'custom_time': 'custom_time_value', 'custom_metadata': {}}, 'rewrite_object': {'kms_key': 'kms_key_value'}, 'logging_config': {'log_actions': [6], 'log_action_states': [1]}, 'create_time': {'seconds': 751, 'nanos': 543}, 'schedule_time': {}, 'complete_time': {}, 'counters': {'total_object_count': 1922, 'succeeded_object_count': 2307, 'failed_object_count': 1987}, 'error_summaries': [{'error_code': 1, 'error_count': 1202, 'error_log_entries': [{'object_uri': 'object_uri_value', 'error_details': ['error_details_value1', 'error_details_value2']}]}], 'state': 1} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = storage_batch_operations.CreateJobRequest.meta.fields["job"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
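+        # (proto-plus message classes lack a DESCRIPTOR attribute, which is how
+        # composite proto-plus and vanilla protobuf fields are told apart below.)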
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["job"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["job"][field])): + del request_init["job"][field][i][subfield] + else: + del request_init["job"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_job(request) + + # Establish that the response is the type that we expect. 
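+    # NOTE: for long-running methods the generated check stops at this
+    # serialization round-trip of the Operation payload.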
+    assert response.operation.name == 'operations/spam'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_job_rest_interceptors(null_interceptor):
+    transport = transports.StorageBatchOperationsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.StorageBatchOperationsRestInterceptor(),
+    )
+    client = StorageBatchOperationsClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.StorageBatchOperationsRestInterceptor, "post_create_job") as post, \
+        mock.patch.object(transports.StorageBatchOperationsRestInterceptor, "post_create_job_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.StorageBatchOperationsRestInterceptor, "pre_create_job") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = storage_batch_operations.CreateJobRequest.pb(storage_batch_operations.CreateJobRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = storage_batch_operations.CreateJobRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.create_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_delete_job_rest_bad_request(request_type=storage_batch_operations.DeleteJobRequest):
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/jobs/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.delete_job(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    storage_batch_operations.DeleteJobRequest,
+    dict,
+])
+def test_delete_job_rest_call_success(request_type):
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/jobs/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = None
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = ''
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.delete_job(request)
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_job_rest_interceptors(null_interceptor):
+    transport = transports.StorageBatchOperationsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.StorageBatchOperationsRestInterceptor(),
+    )
+    client = StorageBatchOperationsClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.StorageBatchOperationsRestInterceptor, "pre_delete_job") as pre:
+        pre.assert_not_called()
+        pb_message = storage_batch_operations.DeleteJobRequest.pb(storage_batch_operations.DeleteJobRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        request = storage_batch_operations.DeleteJobRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+
+        client.delete_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+
+
+def test_cancel_job_rest_bad_request(request_type=storage_batch_operations.CancelJobRequest):
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/jobs/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.cancel_job(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    storage_batch_operations.CancelJobRequest,
+    dict,
+])
+def test_cancel_job_rest_call_success(request_type):
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/jobs/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = storage_batch_operations.CancelJobResponse()
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = storage_batch_operations.CancelJobResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.cancel_job(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, storage_batch_operations.CancelJobResponse)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_cancel_job_rest_interceptors(null_interceptor):
+    transport = transports.StorageBatchOperationsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.StorageBatchOperationsRestInterceptor(),
+    )
+    client = StorageBatchOperationsClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.StorageBatchOperationsRestInterceptor, "post_cancel_job") as post, \
+        mock.patch.object(transports.StorageBatchOperationsRestInterceptor, "post_cancel_job_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.StorageBatchOperationsRestInterceptor, "pre_cancel_job") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = storage_batch_operations.CancelJobRequest.pb(storage_batch_operations.CancelJobRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = storage_batch_operations.CancelJobResponse.to_json(storage_batch_operations.CancelJobResponse())
+        req.return_value.content = return_value
+
+        request = storage_batch_operations.CancelJobRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = storage_batch_operations.CancelJobResponse()
+        post_with_metadata.return_value = storage_batch_operations.CancelJobResponse(), metadata
+
+        client.cancel_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest):
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type()
+    request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_location(request) + + +@pytest.mark.parametrize("request_type", [ + locations_pb2.GetLocationRequest, + dict, +]) +def test_get_location_rest(request_type): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_locations(request) + + +@pytest.mark.parametrize("request_type", [ + locations_pb2.ListLocationsRequest, + dict, +]) +def test_list_locations_rest(request_type): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.CancelOperationRequest, + dict, +]) +def test_cancel_operation_rest(request_type): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.DeleteOperationRequest, + dict, +]) +def test_delete_operation_rest(request_type): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_initialize_client_w_rest(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_jobs_empty_call_rest(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + client.list_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = storage_batch_operations.ListJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_job_empty_call_rest(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + client.get_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = storage_batch_operations.GetJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_job_empty_call_rest(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.create_job),
+            '__call__') as call:
+        client.create_job(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = storage_batch_operations.CreateJobRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_delete_job_empty_call_rest():
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_job),
+            '__call__') as call:
+        client.delete_job(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = storage_batch_operations.DeleteJobRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_cancel_job_empty_call_rest():
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.cancel_job),
+            '__call__') as call:
+        client.cancel_job(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = storage_batch_operations.CancelJobRequest()
+
+        assert args[0] == request_msg
+
+
+def test_storage_batch_operations_rest_lro_client():
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.AbstractOperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_transport_grpc_default():
+    # A client should use the gRPC transport by default.
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    assert isinstance(
+        client.transport,
+        transports.StorageBatchOperationsGrpcTransport,
+    )
+
+
+def test_storage_batch_operations_base_transport_error():
+    # Passing both a credentials object and credentials_file should raise an error
+    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
+        transport = transports.StorageBatchOperationsTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+            credentials_file="credentials.json"
+        )
+
+
+def test_storage_batch_operations_base_transport():
+    # Instantiate the base transport.
+    with mock.patch('google.cloud.storagebatchoperations_v1.services.storage_batch_operations.transports.StorageBatchOperationsTransport.__init__') as Transport:
+        Transport.return_value = None
+        transport = transports.StorageBatchOperationsTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+        )
+
+    # Every method on the transport should just blindly
+    # raise NotImplementedError.
+    methods = (
+        'list_jobs',
+        'get_job',
+        'create_job',
+        'delete_job',
+        'cancel_job',
+        'get_location',
+        'list_locations',
+        'get_operation',
+        'cancel_operation',
+        'delete_operation',
+        'list_operations',
+    )
+    for method in methods:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, method)(request=object())
+
+    with pytest.raises(NotImplementedError):
+        transport.close()
+
+    # Additionally, the LRO client (a property) should
+    # also raise NotImplementedError
+    with pytest.raises(NotImplementedError):
+        transport.operations_client
+
+    # Catch all for all remaining methods and properties
+    remainder = [
+        'kind',
+    ]
+    for r in remainder:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, r)()
+
+
+def test_storage_batch_operations_base_transport_with_credentials_file():
+    # Instantiate the base transport with a credentials file
+    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.storagebatchoperations_v1.services.storage_batch_operations.transports.StorageBatchOperationsTransport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.StorageBatchOperationsTransport(
+            credentials_file="credentials.json",
+            quota_project_id="octopus",
+        )
+        load_creds.assert_called_once_with("credentials.json",
+            scopes=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+            ),
+            quota_project_id="octopus",
+        )
+
+
+def test_storage_batch_operations_base_transport_with_adc():
+    # Test the default credentials are used if credentials and credentials_file are None.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.storagebatchoperations_v1.services.storage_batch_operations.transports.StorageBatchOperationsTransport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.StorageBatchOperationsTransport()
+        adc.assert_called_once()
+
+
+def test_storage_batch_operations_auth_adc():
+    # If no credentials are provided, we should use ADC credentials.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        StorageBatchOperationsClient()
+        adc.assert_called_once_with(
+            scopes=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+            ),
+            quota_project_id=None,
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.StorageBatchOperationsGrpcTransport,
+        transports.StorageBatchOperationsGrpcAsyncIOTransport,
+    ],
+)
+def test_storage_batch_operations_transport_auth_adc(transport_class):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+        adc.assert_called_once_with(
+            scopes=["1", "2"],
+            default_scopes=('https://www.googleapis.com/auth/cloud-platform',),
+            quota_project_id="octopus",
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.StorageBatchOperationsGrpcTransport,
+        transports.StorageBatchOperationsGrpcAsyncIOTransport,
+        transports.StorageBatchOperationsRestTransport,
+    ],
+)
+def test_storage_batch_operations_transport_auth_gdch_credentials(transport_class):
+    host = 'https://language.com'
+    api_audience_tests = [None, 'https://language2.com']
+    api_audience_expect = [host, 'https://language2.com']
+    for t, e in zip(api_audience_tests, api_audience_expect):
+        with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+            gdch_mock = mock.MagicMock()
+            type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
+            adc.return_value = (gdch_mock, None)
+            transport_class(host=host, api_audience=t)
+            gdch_mock.with_gdch_audience.assert_called_once_with(
+                e
+            )
+
+
+@pytest.mark.parametrize(
+    "transport_class,grpc_helpers",
+    [
+        (transports.StorageBatchOperationsGrpcTransport, grpc_helpers),
+        (transports.StorageBatchOperationsGrpcAsyncIOTransport, grpc_helpers_async)
+    ],
+)
+def test_storage_batch_operations_transport_create_channel(transport_class, grpc_helpers):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+        transport_class(
+            quota_project_id="octopus",
+            scopes=["1", "2"]
+        )
+
+        create_channel.assert_called_with(
+            "storagebatchoperations.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+            ),
+            scopes=["1", "2"],
+            default_host="storagebatchoperations.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize("transport_class", [transports.StorageBatchOperationsGrpcTransport, transports.StorageBatchOperationsGrpcAsyncIOTransport])
+def test_storage_batch_operations_grpc_transport_client_cert_source_for_mtls(
+    transport_class
+):
+    cred = ga_credentials.AnonymousCredentials()
+
+    # Check ssl_channel_credentials is used if provided.
+    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+        mock_ssl_channel_creds = mock.Mock()
+        transport_class(
+            host="squid.clam.whelk",
+            credentials=cred,
+            ssl_channel_credentials=mock_ssl_channel_creds
+        )
+        mock_create_channel.assert_called_once_with(
+            "squid.clam.whelk:443",
+            credentials=cred,
+            credentials_file=None,
+            scopes=None,
+            ssl_credentials=mock_ssl_channel_creds,
+            quota_project_id=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+    # is used.
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert,
+                private_key=expected_key
+            )
+
+
+def test_storage_batch_operations_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
+        transports.StorageBatchOperationsRestTransport(
+            credentials=cred,
+            client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_storage_batch_operations_host_no_port(transport_name):
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='storagebatchoperations.googleapis.com'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'storagebatchoperations.googleapis.com:443'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://storagebatchoperations.googleapis.com'
+    )
+
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_storage_batch_operations_host_with_port(transport_name):
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='storagebatchoperations.googleapis.com:8000'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'storagebatchoperations.googleapis.com:8000'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://storagebatchoperations.googleapis.com:8000'
+    )
+
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_storage_batch_operations_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = StorageBatchOperationsClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = StorageBatchOperationsClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.list_jobs._session
+    session2 = client2.transport.list_jobs._session
+    assert session1 != session2
+    session1 = client1.transport.get_job._session
+    session2 = client2.transport.get_job._session
+    assert session1 != session2
+    session1 = client1.transport.create_job._session
+    session2 = client2.transport.create_job._session
+    assert session1 != session2
+    session1 = client1.transport.delete_job._session
+    session2 = client2.transport.delete_job._session
+    assert session1 != session2
+    session1 = client1.transport.cancel_job._session
+    session2 = client2.transport.cancel_job._session
+    assert session1 != session2
+
+
+def test_storage_batch_operations_grpc_transport_channel():
+    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.StorageBatchOperationsGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_storage_batch_operations_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.StorageBatchOperationsGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.StorageBatchOperationsGrpcTransport, transports.StorageBatchOperationsGrpcAsyncIOTransport])
+def test_storage_batch_operations_transport_channel_mtls_with_client_cert_source(
+    transport_class
+):
+    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, 'default') as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
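+
+
+# The mTLS tests above and below pass the module-level `client_cert_source_callback`
+# helper (defined earlier in this file) as the client certificate source. The
+# assertion on `grpc.ssl_channel_credentials` above implies that the callback
+# returns a (certificate_chain, private_key) tuple of bytes. A minimal sketch of
+# such a callback, shown under a hypothetical name so it does not shadow the
+# real helper:
+def _example_client_cert_source():
+    # Return PEM-style client certificate chain and private key bytes.
+    return b"cert bytes", b"key bytes"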
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.StorageBatchOperationsGrpcTransport, transports.StorageBatchOperationsGrpcAsyncIOTransport])
+def test_storage_batch_operations_transport_channel_mtls_with_adc(
+    transport_class
+):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_storage_batch_operations_grpc_lro_client():
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_storage_batch_operations_grpc_lro_async_client():
+    client = StorageBatchOperationsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc_asyncio',
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_crypto_key_path():
+    project = "squid"
+    location = "clam"
+    key_ring = "whelk"
+    crypto_key = "octopus"
+    expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, )
+    actual = StorageBatchOperationsClient.crypto_key_path(project, location, key_ring, crypto_key)
+    assert expected == actual
+
+
+def test_parse_crypto_key_path():
+    expected = {
+        "project": "oyster",
+        "location": "nudibranch",
+        "key_ring": "cuttlefish",
+        "crypto_key": "mussel",
+    }
+    path = StorageBatchOperationsClient.crypto_key_path(**expected)
+
+    # Check that the path construction is reversible.
+ actual = StorageBatchOperationsClient.parse_crypto_key_path(path) + assert expected == actual + +def test_job_path(): + project = "winkle" + location = "nautilus" + job = "scallop" + expected = "projects/{project}/locations/{location}/jobs/{job}".format(project=project, location=location, job=job, ) + actual = StorageBatchOperationsClient.job_path(project, location, job) + assert expected == actual + + +def test_parse_job_path(): + expected = { + "project": "abalone", + "location": "squid", + "job": "clam", + } + path = StorageBatchOperationsClient.job_path(**expected) + + # Check that the path construction is reversible. + actual = StorageBatchOperationsClient.parse_job_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = StorageBatchOperationsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = StorageBatchOperationsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = StorageBatchOperationsClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format(folder=folder, ) + actual = StorageBatchOperationsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = StorageBatchOperationsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = StorageBatchOperationsClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format(organization=organization, ) + actual = StorageBatchOperationsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = StorageBatchOperationsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = StorageBatchOperationsClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format(project=project, ) + actual = StorageBatchOperationsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = StorageBatchOperationsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = StorageBatchOperationsClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = StorageBatchOperationsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = StorageBatchOperationsClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = StorageBatchOperationsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.StorageBatchOperationsTransport, '_prep_wrapped_messages') as prep: + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.StorageBatchOperationsTransport, '_prep_wrapped_messages') as prep: + transport_class = StorageBatchOperationsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + +def test_list_locations_field_headers(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
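+    # (The routing header below is how the client tells the backend which
+    # resource the call concerns: ``request.name`` is serialized into the
+    # ``x-goog-request-params`` metadata entry, which the final assertion
+    # checks verbatim.)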
+ request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_locations_from_dict(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, locations_pb2.Location)
+
+def test_get_location_field_headers():
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials())
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = locations_pb2.Location()
+
+        client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials()
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+
+def test_get_location_from_dict():
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    # (Patch ``get_location`` -- the method under test -- rather than the
+    # neighboring ``list_locations`` stub, so the call below is intercepted.)
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = StorageBatchOperationsAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = StorageBatchOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (StorageBatchOperationsClient, transports.StorageBatchOperationsGrpcTransport), + (StorageBatchOperationsAsyncClient, transports.StorageBatchOperationsGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From cccf77560314c8ded88cd9c853b6b26ea0d043e2 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Wed, 23 Apr 2025 22:11:02 +0000 Subject: [PATCH 2/3] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?= =?UTF-8?q?st-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- .../v1/.coveragerc | 13 - .../v1/.flake8 | 34 - .../v1/LICENSE | 202 - .../v1/MANIFEST.in | 20 - .../v1/README.rst | 143 - .../v1/docs/_static/custom.css | 20 - .../v1/docs/_templates/layout.html | 50 - .../v1/docs/conf.py | 385 - .../v1/docs/index.rst | 10 - .../v1/docs/multiprocessing.rst | 7 - .../storagebatchoperations_v1/services_.rst | 6 - .../storage_batch_operations.rst | 10 - .../docs/storagebatchoperations_v1/types_.rst | 6 - 
.../cloud/storagebatchoperations/__init__.py | 67 - .../storagebatchoperations/gapic_version.py | 16 - .../cloud/storagebatchoperations/py.typed | 2 - .../storagebatchoperations_v1/__init__.py | 68 - .../gapic_metadata.json | 103 - .../gapic_version.py | 16 - .../cloud/storagebatchoperations_v1/py.typed | 2 - .../services/__init__.py | 15 - .../storage_batch_operations/__init__.py | 22 - .../storage_batch_operations/async_client.py | 1153 --- .../storage_batch_operations/client.py | 1530 ---- .../storage_batch_operations/pagers.py | 167 - .../transports/README.rst | 9 - .../transports/__init__.py | 38 - .../transports/base.py | 330 - .../transports/grpc.py | 584 -- .../transports/grpc_asyncio.py | 682 -- .../transports/rest.py | 1819 ----- .../transports/rest_base.py | 455 -- .../types/__init__.py | 62 - .../types/storage_batch_operations.py | 288 - .../types/storage_batch_operations_types.py | 653 -- .../v1/mypy.ini | 3 - .../v1/noxfile.py | 591 -- ...oogle.cloud.storagebatchoperations.v1.json | 830 --- ...orage_batch_operations_cancel_job_async.py | 52 - ...torage_batch_operations_cancel_job_sync.py | 52 - ...orage_batch_operations_create_job_async.py | 63 - ...torage_batch_operations_create_job_sync.py | 63 - ...orage_batch_operations_delete_job_async.py | 50 - ...torage_batch_operations_delete_job_sync.py | 50 - ..._storage_batch_operations_get_job_async.py | 52 - ...d_storage_batch_operations_get_job_sync.py | 52 - ...torage_batch_operations_list_jobs_async.py | 53 - ...storage_batch_operations_list_jobs_sync.py | 53 - ...ixup_storagebatchoperations_v1_keywords.py | 180 - .../v1/setup.py | 98 - .../v1/testing/constraints-3.10.txt | 6 - .../v1/testing/constraints-3.11.txt | 6 - .../v1/testing/constraints-3.12.txt | 6 - .../v1/testing/constraints-3.13.txt | 11 - .../v1/testing/constraints-3.7.txt | 10 - .../v1/testing/constraints-3.8.txt | 6 - .../v1/testing/constraints-3.9.txt | 6 - .../v1/tests/__init__.py | 16 - .../v1/tests/unit/__init__.py | 16 - .../v1/tests/unit/gapic/__init__.py | 16 - .../storagebatchoperations_v1/__init__.py | 16 - .../test_storage_batch_operations.py | 6318 ----------------- .../types/storage_batch_operations_types.py | 8 +- 63 files changed, 4 insertions(+), 17666 deletions(-) delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/.coveragerc delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/.flake8 delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/LICENSE delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/MANIFEST.in delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/README.rst delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/_static/custom.css delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/_templates/layout.html delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/conf.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/index.rst delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/multiprocessing.rst delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/storagebatchoperations_v1/services_.rst delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/storagebatchoperations_v1/storage_batch_operations.rst delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/storagebatchoperations_v1/types_.rst delete mode 100644 
owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations/__init__.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations/gapic_version.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations/py.typed delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/__init__.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/gapic_metadata.json delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/gapic_version.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/py.typed delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/__init__.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/__init__.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/async_client.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/client.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/pagers.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/README.rst delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/__init__.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/base.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/grpc.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/rest.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/rest_base.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/types/__init__.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/types/storage_batch_operations.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/types/storage_batch_operations_types.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/mypy.ini delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/noxfile.py delete mode 100644 
owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/snippet_metadata_google.cloud.storagebatchoperations.v1.json delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_cancel_job_async.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_cancel_job_sync.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_create_job_async.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_create_job_sync.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_delete_job_async.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_delete_job_sync.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_get_job_async.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_get_job_sync.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_list_jobs_async.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_list_jobs_sync.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/scripts/fixup_storagebatchoperations_v1_keywords.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/setup.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.13.txt delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/__init__.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/__init__.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/gapic/storagebatchoperations_v1/__init__.py delete mode 100644 owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/gapic/storagebatchoperations_v1/test_storage_batch_operations.py diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/.coveragerc 
b/owl-bot-staging/google-cloud-storagebatchoperations/v1/.coveragerc deleted file mode 100644 index bf5f3b460141..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/storagebatchoperations/__init__.py - google/cloud/storagebatchoperations/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/.flake8 b/owl-bot-staging/google-cloud-storagebatchoperations/v1/.flake8 deleted file mode 100644 index 90316de21489..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/.flake8 +++ /dev/null @@ -1,34 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -[flake8] -# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): -# Resolve flake8 lint issues -ignore = E203, E231, E266, E501, W503 -exclude = - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): - # Ensure that generated code passes flake8 lint - **/gapic/** - **/services/** - **/types/** - # Exclude Protobuf gencode - *_pb2.py - - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/LICENSE b/owl-bot-staging/google-cloud-storagebatchoperations/v1/LICENSE deleted file mode 100644 index d64569567334..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/MANIFEST.in b/owl-bot-staging/google-cloud-storagebatchoperations/v1/MANIFEST.in deleted file mode 100644 index dae249ec8976..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/MANIFEST.in +++ /dev/null @@ -1,20 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-include README.rst LICENSE
-recursive-include google *.py *.pyi *.json *.proto py.typed
-recursive-include tests *
-global-exclude *.py[co]
-global-exclude __pycache__
diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/README.rst b/owl-bot-staging/google-cloud-storagebatchoperations/v1/README.rst
deleted file mode 100644
index d53120593783..000000000000
--- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/README.rst
+++ /dev/null
@@ -1,143 +0,0 @@
-Python Client for Google Cloud Storagebatchoperations API
-==========================================================
-
-Quick Start
------------
-
-In order to use this library, you first need to go through the following steps:
-
-1. `Select or create a Cloud Platform project.`_
-2. `Enable billing for your project.`_
-3. Enable the Google Cloud Storagebatchoperations API.
-4. `Setup Authentication.`_
-
-.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
-.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
-.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
-
-Installation
-~~~~~~~~~~~~
-
-Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
-create isolated Python environments. The basic problem it addresses is one of
-dependencies and versions, and indirectly permissions.
-
-With `virtualenv`_, it's possible to install this library without needing system
-install permissions, and without clashing with the installed system
-dependencies.
-
-.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
-
-
-Mac/Linux
-^^^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    source <your-env>/bin/activate
-    <your-env>/bin/pip install /path/to/library
-
-
-Windows
-^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    <your-env>\Scripts\activate
-    <your-env>\Scripts\pip.exe install \path\to\library
-
-
-Logging
--------
-
-This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
-Note the following:
-
-#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging.
-#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
-#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
-
-
-Simple, environment-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
-logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
-messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging
-event.
-
-A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
-
-- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
-- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
-
-**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
-
-
-Examples
-^^^^^^^^
-
-- Enabling the default handler for all Google-based loggers
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
-
-- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
-
-
-Advanced, code-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-You can also configure a valid logging scope using Python's standard `logging` mechanism.
-
-
-Examples
-^^^^^^^^
-
-- Configuring a handler for all Google-based loggers
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud import storagebatchoperations_v1
-
-    base_logger = logging.getLogger("google")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud import storagebatchoperations_v1
-
-    base_logger = logging.getLogger("google.cloud.library_v1")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-
-Logging details
-~~~~~~~~~~~~~~~
-
-#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
-   logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
-   :code:`logging.getLogger("google").propagate = True` in your code (see the sketch after this list).
-#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
-   one library, but decide you need to also set up environment-based logging configuration for another library.
-
-   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
-      if the code-based configuration gets applied first.
-
-#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
-   executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
-   (This is the reason for 2.i. above.)
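-A minimal sketch combining the points above (it assumes default application
-credentials are available in the environment; the handler choice is
-illustrative, not required):
-
-.. code-block:: python
-
-    import logging
-
-    # Opt in to propagation and attach a root handler *before* any client is
-    # instantiated, so the Google-specific defaults do not run first.
-    logging.getLogger("google").propagate = True
-    logging.basicConfig(level=logging.DEBUG)
-
-    from google.cloud import storagebatchoperations_v1
-
-    # RPC events from this client now flow through the root handler.
-    client = storagebatchoperations_v1.StorageBatchOperationsClient()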
diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/_static/custom.css
deleted file mode 100644
index b0a295464b23..000000000000
--- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/_static/custom.css
+++ /dev/null
@@ -1,20 +0,0 @@
-div#python2-eol {
-    border-color: red;
-    border-width: medium;
-}
-
-/* Ensure minimum width for 'Parameters' / 'Returns' column */
-dl.field-list > dt {
-    min-width: 100px
-}
-
-/* Insert space between methods for readability */
-dl.method {
-    padding-top: 10px;
-    padding-bottom: 10px
-}
-
-/* Insert empty space between classes */
-dl.class {
-    padding-bottom: 50px
-}
diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/_templates/layout.html b/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/_templates/layout.html
deleted file mode 100644
index 95e9c77fcfe1..000000000000
--- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/_templates/layout.html
+++ /dev/null
@@ -1,50 +0,0 @@
-
-{% extends "!layout.html" %}
-{%- block content %}
-{%- if theme_fixed_sidebar|lower == 'true' %}
-  <div class="document">
-    {{ sidebar() }}
-    {%- block document %}
-      <div class="documentwrapper">
-      {%- if render_sidebar %}
-        <div class="bodywrapper">
-      {%- endif %}
-
-        {%- block relbar_top %}
-        {%- if theme_show_relbar_top|tobool %}
-        <div class="related top">
-          &nbsp;
-          {{- rellink_markup () }}
-        </div>
-        {%- endif %}
-        {% endblock %}
-
-        <div class="body" role="main">
-          <div class="admonition" id="python2-eol">
-          As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
-          Library versions released prior to that date will continue to be available. For more information please
-          visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
-          </div>
-          {% block body %} {% endblock %}
-        </div>
-
-        {%- block relbar_bottom %}
-        {%- if theme_show_relbar_bottom|tobool %}
-        <div class="related bottom">
-          &nbsp;
-          {{- rellink_markup () }}
-        </div>
-        {%- endif %}
-        {% endblock %}
-
-      {%- if render_sidebar %}
-        </div>
-      {%- endif %}
-      </div>
-    {%- endblock %}
-    <div class="clearer"></div>
-  </div>
-{%- else %} -{{ super() }} -{%- endif %} -{%- endblock %} diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/conf.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/conf.py deleted file mode 100644 index 5ba6711f5cb5..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/conf.py +++ /dev/null @@ -1,385 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-storagebatchoperations documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -# For plugins that can not read conf.py. -# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 -sys.path.insert(0, os.path.abspath(".")) - -__version__ = "" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.5.0" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.doctest", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", - "recommonmark", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_options = {"members": True} -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# source_suffix = ['.rst', '.md'] -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-storagebatchoperations" -copyright = u"2025, Google, LLC" -author = u"Google APIs" - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. 
-#
-# This is also used if you do content translation via gettext catalogs.
-# Usually you set "language" from the command line for these cases.
-language = None
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-# today = ''
-# Else, today_fmt is used as the format for a strftime call.
-# today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-exclude_patterns = [
-    "_build",
-    "**/.nox/**/*",
-    "samples/AUTHORING_GUIDE.md",
-    "samples/CONTRIBUTING.md",
-    "samples/snippets/README.rst",
-]
-
-# The reST default role (used for this markup: `text`) to use for all
-# documents.
-# default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-# add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-# add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-# show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = "sphinx"
-
-# A list of ignored prefixes for module index sorting.
-# modindex_common_prefix = []
-
-# If true, keep warnings as "system message" paragraphs in the built documents.
-# keep_warnings = False
-
-# If true, `todo` and `todoList` produce output, else they produce nothing.
-todo_include_todos = True
-
-
-# -- Options for HTML output ----------------------------------------------
-
-# The theme to use for HTML and HTML Help pages. See the documentation for
-# a list of builtin themes.
-html_theme = "alabaster"
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further. For a list of options available for each theme, see the
-# documentation.
-html_theme_options = {
-    "description": "Google Cloud Client Libraries for google-cloud-storagebatchoperations",
-    "github_user": "googleapis",
-    "github_repo": "google-cloud-python",
-    "github_banner": True,
-    "font_family": "'Roboto', Georgia, sans",
-    "head_font_family": "'Roboto', Georgia, serif",
-    "code_font_family": "'Roboto Mono', 'Consolas', monospace",
-}
-
-# Add any paths that contain custom themes here, relative to this directory.
-# html_theme_path = []
-
-# The name for this set of Sphinx documents. If None, it defaults to
-# "<project> v<release> documentation".
-# html_title = None
-
-# A shorter title for the navigation bar. Default is the same as html_title.
-# html_short_title = None
-
-# The name of an image file (relative to this directory) to place at the top
-# of the sidebar.
-# html_logo = None
-
-# The name of an image file (within the static path) to use as favicon of the
-# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
-# pixels large.
-# html_favicon = None
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ["_static"]
-
-# Add any extra paths that contain custom files (such as robots.txt or
-# .htaccess) here, relative to this directory. These files are copied
-# directly to the root of the documentation.
-# html_extra_path = []
-
-# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
-# using the given strftime format.
-# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-storagebatchoperations-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-storagebatchoperations.tex", - u"google-cloud-storagebatchoperations Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. 
-# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-storagebatchoperations", - "google-cloud-storagebatchoperations Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-storagebatchoperations", - "google-cloud-storagebatchoperations Documentation", - author, - "google-cloud-storagebatchoperations", - "google-cloud-storagebatchoperations Library", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("https://python.readthedocs.org/en/latest/", None), - "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), - "google.api_core": ( - "https://googleapis.dev/python/google-api-core/latest/", - None, - ), - "grpc": ("https://grpc.github.io/grpc/python/", None), - "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/index.rst b/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/index.rst deleted file mode 100644 index fcca274c7f0b..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/index.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. include:: multiprocessing.rst - - -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - storagebatchoperations_v1/services_ - storagebatchoperations_v1/types_ diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/multiprocessing.rst b/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/multiprocessing.rst deleted file mode 100644 index 536d17b2ea65..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/multiprocessing.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. note:: - - Because this client uses :mod:`grpc` library, it is safe to - share instances across threads. 
In multiprocessing scenarios, the best - practice is to create client instances *after* the invocation of - :func:`os.fork` by :class:`multiprocessing.pool.Pool` or - :class:`multiprocessing.Process`. diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/storagebatchoperations_v1/services_.rst b/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/storagebatchoperations_v1/services_.rst deleted file mode 100644 index 7457657d5b20..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/storagebatchoperations_v1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Storagebatchoperations v1 API -======================================================= -.. toctree:: - :maxdepth: 2 - - storage_batch_operations diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/storagebatchoperations_v1/storage_batch_operations.rst b/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/storagebatchoperations_v1/storage_batch_operations.rst deleted file mode 100644 index ae1ef85a3689..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/storagebatchoperations_v1/storage_batch_operations.rst +++ /dev/null @@ -1,10 +0,0 @@ -StorageBatchOperations ----------------------------------------- - -.. automodule:: google.cloud.storagebatchoperations_v1.services.storage_batch_operations - :members: - :inherited-members: - -.. automodule:: google.cloud.storagebatchoperations_v1.services.storage_batch_operations.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/storagebatchoperations_v1/types_.rst b/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/storagebatchoperations_v1/types_.rst deleted file mode 100644 index 2c6e1fefedd6..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/docs/storagebatchoperations_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Storagebatchoperations v1 API -==================================================== - -.. automodule:: google.cloud.storagebatchoperations_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations/__init__.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations/__init__.py deleted file mode 100644 index 62cc717939c9..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations/__init__.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
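The multiprocessing note above benefits from a concrete illustration. The following is a hedged sketch, not part of the generated package: it assumes default application credentials and a placeholder parent path, and follows the note by constructing one client per worker process, after the fork.

.. code-block:: python

    import multiprocessing

    from google.cloud import storagebatchoperations_v1

    def count_jobs(parent: str) -> int:
        # Create the client *inside* the worker, i.e. after os.fork(),
        # because gRPC channels must not be shared across processes.
        client = storagebatchoperations_v1.StorageBatchOperationsClient()
        return sum(1 for _ in client.list_jobs(parent=parent))

    if __name__ == "__main__":
        # Placeholder project/location; substitute real values.
        parents = ["projects/my-project/locations/us-central1"]
        with multiprocessing.Pool(processes=2) as pool:
            print(pool.map(count_jobs, parents))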
-# -from google.cloud.storagebatchoperations import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.storagebatchoperations_v1.services.storage_batch_operations.client import StorageBatchOperationsClient -from google.cloud.storagebatchoperations_v1.services.storage_batch_operations.async_client import StorageBatchOperationsAsyncClient - -from google.cloud.storagebatchoperations_v1.types.storage_batch_operations import CancelJobRequest -from google.cloud.storagebatchoperations_v1.types.storage_batch_operations import CancelJobResponse -from google.cloud.storagebatchoperations_v1.types.storage_batch_operations import CreateJobRequest -from google.cloud.storagebatchoperations_v1.types.storage_batch_operations import DeleteJobRequest -from google.cloud.storagebatchoperations_v1.types.storage_batch_operations import GetJobRequest -from google.cloud.storagebatchoperations_v1.types.storage_batch_operations import ListJobsRequest -from google.cloud.storagebatchoperations_v1.types.storage_batch_operations import ListJobsResponse -from google.cloud.storagebatchoperations_v1.types.storage_batch_operations import OperationMetadata -from google.cloud.storagebatchoperations_v1.types.storage_batch_operations_types import BucketList -from google.cloud.storagebatchoperations_v1.types.storage_batch_operations_types import Counters -from google.cloud.storagebatchoperations_v1.types.storage_batch_operations_types import DeleteObject -from google.cloud.storagebatchoperations_v1.types.storage_batch_operations_types import ErrorLogEntry -from google.cloud.storagebatchoperations_v1.types.storage_batch_operations_types import ErrorSummary -from google.cloud.storagebatchoperations_v1.types.storage_batch_operations_types import Job -from google.cloud.storagebatchoperations_v1.types.storage_batch_operations_types import LoggingConfig -from google.cloud.storagebatchoperations_v1.types.storage_batch_operations_types import Manifest -from google.cloud.storagebatchoperations_v1.types.storage_batch_operations_types import PrefixList -from google.cloud.storagebatchoperations_v1.types.storage_batch_operations_types import PutMetadata -from google.cloud.storagebatchoperations_v1.types.storage_batch_operations_types import PutObjectHold -from google.cloud.storagebatchoperations_v1.types.storage_batch_operations_types import RewriteObject - -__all__ = ('StorageBatchOperationsClient', - 'StorageBatchOperationsAsyncClient', - 'CancelJobRequest', - 'CancelJobResponse', - 'CreateJobRequest', - 'DeleteJobRequest', - 'GetJobRequest', - 'ListJobsRequest', - 'ListJobsResponse', - 'OperationMetadata', - 'BucketList', - 'Counters', - 'DeleteObject', - 'ErrorLogEntry', - 'ErrorSummary', - 'Job', - 'LoggingConfig', - 'Manifest', - 'PrefixList', - 'PutMetadata', - 'PutObjectHold', - 'RewriteObject', -) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations/gapic_version.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations/gapic_version.py deleted file mode 100644 index 20a9cd975b02..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations/py.typed b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations/py.typed deleted file mode 100644 index bc2c99219089..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-storagebatchoperations package uses inline types. diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/__init__.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/__init__.py deleted file mode 100644 index 373fd0e17e75..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/__init__.py +++ /dev/null @@ -1,68 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
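An aside on the two import surfaces: the versionless ``google.cloud.storagebatchoperations`` package above simply re-exports the ``_v1`` clients and types, so either import path may be used. A hedged sketch:

.. code-block:: python

    # Both packages expose the same objects; the versionless package
    # re-exports the v1 clients and types, as its __init__.py above shows.
    from google.cloud import storagebatchoperations
    from google.cloud import storagebatchoperations_v1

    assert (
        storagebatchoperations.StorageBatchOperationsClient
        is storagebatchoperations_v1.StorageBatchOperationsClient
    )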
-# -from google.cloud.storagebatchoperations_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.storage_batch_operations import StorageBatchOperationsClient -from .services.storage_batch_operations import StorageBatchOperationsAsyncClient - -from .types.storage_batch_operations import CancelJobRequest -from .types.storage_batch_operations import CancelJobResponse -from .types.storage_batch_operations import CreateJobRequest -from .types.storage_batch_operations import DeleteJobRequest -from .types.storage_batch_operations import GetJobRequest -from .types.storage_batch_operations import ListJobsRequest -from .types.storage_batch_operations import ListJobsResponse -from .types.storage_batch_operations import OperationMetadata -from .types.storage_batch_operations_types import BucketList -from .types.storage_batch_operations_types import Counters -from .types.storage_batch_operations_types import DeleteObject -from .types.storage_batch_operations_types import ErrorLogEntry -from .types.storage_batch_operations_types import ErrorSummary -from .types.storage_batch_operations_types import Job -from .types.storage_batch_operations_types import LoggingConfig -from .types.storage_batch_operations_types import Manifest -from .types.storage_batch_operations_types import PrefixList -from .types.storage_batch_operations_types import PutMetadata -from .types.storage_batch_operations_types import PutObjectHold -from .types.storage_batch_operations_types import RewriteObject - -__all__ = ( - 'StorageBatchOperationsAsyncClient', -'BucketList', -'CancelJobRequest', -'CancelJobResponse', -'Counters', -'CreateJobRequest', -'DeleteJobRequest', -'DeleteObject', -'ErrorLogEntry', -'ErrorSummary', -'GetJobRequest', -'Job', -'ListJobsRequest', -'ListJobsResponse', -'LoggingConfig', -'Manifest', -'OperationMetadata', -'PrefixList', -'PutMetadata', -'PutObjectHold', -'RewriteObject', -'StorageBatchOperationsClient', -) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/gapic_metadata.json deleted file mode 100644 index c5bc99c44670..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/gapic_metadata.json +++ /dev/null @@ -1,103 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.storagebatchoperations_v1", - "protoPackage": "google.cloud.storagebatchoperations.v1", - "schema": "1.0", - "services": { - "StorageBatchOperations": { - "clients": { - "grpc": { - "libraryClient": "StorageBatchOperationsClient", - "rpcs": { - "CancelJob": { - "methods": [ - "cancel_job" - ] - }, - "CreateJob": { - "methods": [ - "create_job" - ] - }, - "DeleteJob": { - "methods": [ - "delete_job" - ] - }, - "GetJob": { - "methods": [ - "get_job" - ] - }, - "ListJobs": { - "methods": [ - "list_jobs" - ] - } - } - }, - "grpc-async": { - "libraryClient": "StorageBatchOperationsAsyncClient", - "rpcs": { - "CancelJob": { - "methods": [ - "cancel_job" - ] - }, - "CreateJob": { - "methods": [ - "create_job" - ] - }, - "DeleteJob": { - "methods": [ - "delete_job" - ] - }, - "GetJob": { - "methods": [ - "get_job" - ] - }, - "ListJobs": { - "methods": [ - "list_jobs" - ] - } - } - }, - "rest": { - "libraryClient": "StorageBatchOperationsClient", - 
"rpcs": { - "CancelJob": { - "methods": [ - "cancel_job" - ] - }, - "CreateJob": { - "methods": [ - "create_job" - ] - }, - "DeleteJob": { - "methods": [ - "delete_job" - ] - }, - "GetJob": { - "methods": [ - "get_job" - ] - }, - "ListJobs": { - "methods": [ - "list_jobs" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/gapic_version.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/gapic_version.py deleted file mode 100644 index 20a9cd975b02..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/py.typed b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/py.typed deleted file mode 100644 index bc2c99219089..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-storagebatchoperations package uses inline types. diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/__init__.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/__init__.py deleted file mode 100644 index cbf94b283c70..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/__init__.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/__init__.py deleted file mode 100644 index d13eaefe23de..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import StorageBatchOperationsClient -from .async_client import StorageBatchOperationsAsyncClient - -__all__ = ( - 'StorageBatchOperationsClient', - 'StorageBatchOperationsAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/async_client.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/async_client.py deleted file mode 100644 index 70599dae075e..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/async_client.py +++ /dev/null @@ -1,1153 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
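Before the async client definition that follows, a hedged sketch of typical end-to-end use; this is illustration, not generated code, and it assumes default credentials and a placeholder parent path:

.. code-block:: python

    import asyncio

    from google.cloud import storagebatchoperations_v1

    async def main() -> None:
        client = storagebatchoperations_v1.StorageBatchOperationsAsyncClient()
        # Placeholder resource path; substitute a real project and location.
        parent = "projects/my-project/locations/us-central1"
        pager = await client.list_jobs(parent=parent)
        async for job in pager:
            print(job.name)
        # Page-level iteration is also available via `pager.pages`.
        # Construction alternatives (placeholder values):
        #   StorageBatchOperationsAsyncClient.from_service_account_file("sa.json")
        #   StorageBatchOperationsAsyncClient(client_options=ClientOptions(api_endpoint=...))

    asyncio.run(main())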
-# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.storagebatchoperations_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.cloud.storagebatchoperations_v1.services.storage_batch_operations import pagers -from google.cloud.storagebatchoperations_v1.types import storage_batch_operations -from google.cloud.storagebatchoperations_v1.types import storage_batch_operations_types -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import StorageBatchOperationsTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import StorageBatchOperationsGrpcAsyncIOTransport -from .client import StorageBatchOperationsClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class StorageBatchOperationsAsyncClient: - """Storage Batch Operations offers a managed experience to - perform batch operations on millions of Cloud Storage objects in - a serverless fashion. With this service, you can automate and - simplify large scale API operations performed on Cloud Storage - objects. - """ - - _client: StorageBatchOperationsClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
-    DEFAULT_ENDPOINT = StorageBatchOperationsClient.DEFAULT_ENDPOINT
-    DEFAULT_MTLS_ENDPOINT = StorageBatchOperationsClient.DEFAULT_MTLS_ENDPOINT
-    _DEFAULT_ENDPOINT_TEMPLATE = StorageBatchOperationsClient._DEFAULT_ENDPOINT_TEMPLATE
-    _DEFAULT_UNIVERSE = StorageBatchOperationsClient._DEFAULT_UNIVERSE
-
-    crypto_key_path = staticmethod(StorageBatchOperationsClient.crypto_key_path)
-    parse_crypto_key_path = staticmethod(StorageBatchOperationsClient.parse_crypto_key_path)
-    job_path = staticmethod(StorageBatchOperationsClient.job_path)
-    parse_job_path = staticmethod(StorageBatchOperationsClient.parse_job_path)
-    common_billing_account_path = staticmethod(StorageBatchOperationsClient.common_billing_account_path)
-    parse_common_billing_account_path = staticmethod(StorageBatchOperationsClient.parse_common_billing_account_path)
-    common_folder_path = staticmethod(StorageBatchOperationsClient.common_folder_path)
-    parse_common_folder_path = staticmethod(StorageBatchOperationsClient.parse_common_folder_path)
-    common_organization_path = staticmethod(StorageBatchOperationsClient.common_organization_path)
-    parse_common_organization_path = staticmethod(StorageBatchOperationsClient.parse_common_organization_path)
-    common_project_path = staticmethod(StorageBatchOperationsClient.common_project_path)
-    parse_common_project_path = staticmethod(StorageBatchOperationsClient.parse_common_project_path)
-    common_location_path = staticmethod(StorageBatchOperationsClient.common_location_path)
-    parse_common_location_path = staticmethod(StorageBatchOperationsClient.parse_common_location_path)
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            StorageBatchOperationsAsyncClient: The constructed client.
-        """
-        return StorageBatchOperationsClient.from_service_account_info.__func__(StorageBatchOperationsAsyncClient, info, *args, **kwargs)  # type: ignore
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            StorageBatchOperationsAsyncClient: The constructed client.
-        """
-        return StorageBatchOperationsClient.from_service_account_file.__func__(StorageBatchOperationsAsyncClient, filename, *args, **kwargs)  # type: ignore
-
-    from_service_account_json = from_service_account_file
-
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None):
-        """Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-        return StorageBatchOperationsClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
-
-    @property
-    def transport(self) -> StorageBatchOperationsTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            StorageBatchOperationsTransport: The transport used by the client instance.
-        """
-        return self._client.transport
-
-    @property
-    def api_endpoint(self):
-        """Return the API endpoint used by the client instance.
-
-        Returns:
-            str: The API endpoint used by the client instance.
-        """
-        return self._client._api_endpoint
-
-    @property
-    def universe_domain(self) -> str:
-        """Return the universe domain used by the client instance.
-
-        Returns:
-            str: The universe domain used
-                by the client instance.
-        """
-        return self._client._universe_domain
-
-    get_transport_class = StorageBatchOperationsClient.get_transport_class
-
-    def __init__(self, *,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            transport: Optional[Union[str, StorageBatchOperationsTransport, Callable[..., StorageBatchOperationsTransport]]] = "grpc_asyncio",
-            client_options: Optional[ClientOptions] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            ) -> None:
-        """Instantiates the storage batch operations async client.
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Optional[Union[str,StorageBatchOperationsTransport,Callable[..., StorageBatchOperationsTransport]]]):
-                The transport to use, or a Callable that constructs and returns a new transport to use.
-                If a Callable is given, it will be called with the same set of initialization
-                arguments as used in the StorageBatchOperationsTransport constructor.
-                If set to None, a transport is chosen automatically.
-            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
-                Custom options for the client.
-
-                1. The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client when ``transport`` is
-                not explicitly provided. Only if this property is not set and
-                ``transport`` was not explicitly provided, the endpoint is
-                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                variable, which can have one of the following values:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto-switch to the
-                default mTLS endpoint if client certificate is present; this is
-                the default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide a client certificate for mTLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-                3. The ``universe_domain`` property can be used to override the
-                default "googleapis.com" universe. Note that ``api_endpoint``
-                property still takes precedence; and ``universe_domain`` is
-                currently not supported for mTLS.
-
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client = StorageBatchOperationsClient(
-            credentials=credentials,
-            transport=transport,
-            client_options=client_options,
-            client_info=client_info,
-
-        )
-
-        if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG):  # pragma: NO COVER
-            _LOGGER.debug(
-                "Created client `google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient`.",
-                extra = {
-                    "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations",
-                    "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""),
-                    "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}",
-                    "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(),
-                } if hasattr(self._client._transport, "_credentials") else {
-                    "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations",
-                    "credentialsType": None,
-                }
-            )
-
-    async def list_jobs(self,
-            request: Optional[Union[storage_batch_operations.ListJobsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListJobsAsyncPager:
-        r"""Lists Jobs in a given project and location.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import storagebatchoperations_v1
-
-            async def sample_list_jobs():
-                # Create a client
-                client = storagebatchoperations_v1.StorageBatchOperationsAsyncClient()
-
-                # Initialize request argument(s)
-                request = storagebatchoperations_v1.ListJobsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = await client.list_jobs(request=request)
-
-                # Handle the response
-                async for response in page_result:
-                    print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.storagebatchoperations_v1.types.ListJobsRequest, dict]]):
-                The request object. Message for request to list Jobs
-            parent (:class:`str`):
-                Required. Format:
-                projects/{project_id}/locations/{location_id}.
- - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.storagebatchoperations_v1.services.storage_batch_operations.pagers.ListJobsAsyncPager: - Message for response to listing Jobs - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, storage_batch_operations.ListJobsRequest): - request = storage_batch_operations.ListJobsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListJobsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_job(self, - request: Optional[Union[storage_batch_operations.GetJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> storage_batch_operations_types.Job: - r"""Gets a batch job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import storagebatchoperations_v1 - - async def sample_get_job(): - # Create a client - client = storagebatchoperations_v1.StorageBatchOperationsAsyncClient() - - # Initialize request argument(s) - request = storagebatchoperations_v1.GetJobRequest( - name="name_value", - ) - - # Make the request - response = await client.get_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.storagebatchoperations_v1.types.GetJobRequest, dict]]): - The request object. Message for getting a Job - name (:class:`str`): - Required. ``name`` of the job to retrieve. Format: - projects/{project_id}/locations/{location_id}/jobs/{job_id} - . - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.storagebatchoperations_v1.types.Job: - The Storage Batch Operations Job - description. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, storage_batch_operations.GetJobRequest): - request = storage_batch_operations.GetJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_job(self, - request: Optional[Union[storage_batch_operations.CreateJobRequest, dict]] = None, - *, - parent: Optional[str] = None, - job: Optional[storage_batch_operations_types.Job] = None, - job_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a batch job. 
- - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import storagebatchoperations_v1 - - async def sample_create_job(): - # Create a client - client = storagebatchoperations_v1.StorageBatchOperationsAsyncClient() - - # Initialize request argument(s) - job = storagebatchoperations_v1.Job() - job.bucket_list.buckets.bucket = "bucket_value" - job.put_object_hold.temporary_hold = "UNSET" - job.put_object_hold.event_based_hold = "UNSET" - - request = storagebatchoperations_v1.CreateJobRequest( - parent="parent_value", - job_id="job_id_value", - job=job, - ) - - # Make the request - operation = client.create_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.storagebatchoperations_v1.types.CreateJobRequest, dict]]): - The request object. Message for creating a Job - parent (:class:`str`): - Required. Value for parent. - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job (:class:`google.cloud.storagebatchoperations_v1.types.Job`): - Required. The resource being created - This corresponds to the ``job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_id (:class:`str`): - Required. The optional ``job_id`` for this Job . If not - specified, an id is generated. ``job_id`` should be no - more than 128 characters and must include only - characters available in DNS names, as defined by - RFC-1123. - - This corresponds to the ``job_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.storagebatchoperations_v1.types.Job` - The Storage Batch Operations Job description. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, job, job_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
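Before the request-coercion logic resumes below, a hedged sketch of consuming the long-running operation that ``create_job`` returns. All names and values are placeholders mirroring the generated sample above, and the nested ``BucketList.Bucket`` construction is an assumption for illustration:

.. code-block:: python

    from google.cloud import storagebatchoperations_v1

    async def create_and_wait(client, parent: str) -> None:
        # Minimal Job, mirroring the generated sample above (placeholder values).
        job = storagebatchoperations_v1.Job(
            bucket_list=storagebatchoperations_v1.BucketList(
                buckets=[storagebatchoperations_v1.BucketList.Bucket(bucket="my-bucket")]
            ),
            put_object_hold=storagebatchoperations_v1.PutObjectHold(
                temporary_hold="UNSET", event_based_hold="UNSET"
            ),
        )
        operation = await client.create_job(parent=parent, job=job, job_id="my-job-id")
        # Poll (asynchronously) until the server finishes; the result is a Job.
        created = await operation.result()
        print(created.name)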
- if not isinstance(request, storage_batch_operations.CreateJobRequest): - request = storage_batch_operations.CreateJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if job is not None: - request.job = job - if job_id is not None: - request.job_id = job_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - storage_batch_operations_types.Job, - metadata_type=storage_batch_operations.OperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_job(self, - request: Optional[Union[storage_batch_operations.DeleteJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a batch job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import storagebatchoperations_v1 - - async def sample_delete_job(): - # Create a client - client = storagebatchoperations_v1.StorageBatchOperationsAsyncClient() - - # Initialize request argument(s) - request = storagebatchoperations_v1.DeleteJobRequest( - name="name_value", - ) - - # Make the request - await client.delete_job(request=request) - - Args: - request (Optional[Union[google.cloud.storagebatchoperations_v1.types.DeleteJobRequest, dict]]): - The request object. Message for deleting a Job - name (:class:`str`): - Required. The ``name`` of the job to delete. Format: - projects/{project_id}/locations/{location_id}/jobs/{job_id} - . - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, storage_batch_operations.DeleteJobRequest): - request = storage_batch_operations.DeleteJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def cancel_job(self, - request: Optional[Union[storage_batch_operations.CancelJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> storage_batch_operations.CancelJobResponse: - r"""Cancels a batch job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import storagebatchoperations_v1 - - async def sample_cancel_job(): - # Create a client - client = storagebatchoperations_v1.StorageBatchOperationsAsyncClient() - - # Initialize request argument(s) - request = storagebatchoperations_v1.CancelJobRequest( - name="name_value", - ) - - # Make the request - response = await client.cancel_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.storagebatchoperations_v1.types.CancelJobRequest, dict]]): - The request object. Message for Job to Cancel - name (:class:`str`): - Required. The ``name`` of the job to cancel. Format: - projects/{project_id}/locations/{location_id}/jobs/{job_id}. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.storagebatchoperations_v1.types.CancelJobResponse: - Message for response to cancel Job. - """ - # Create or coerce a protobuf request object. 
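A hedged aside on the rule enforced by the quick check that follows: the ``request`` object and the flattened field arguments are mutually exclusive, so a caller picks exactly one style (the job name below is a placeholder):

.. code-block:: python

    from google.cloud import storagebatchoperations_v1

    # Placeholder job name.
    NAME = "projects/my-project/locations/us-central1/jobs/my-job"

    async def cancel(client) -> None:
        # Either pass a request object...
        request = storagebatchoperations_v1.CancelJobRequest(name=NAME)
        await client.cancel_job(request=request)

        # ...or the flattened field, but never both:
        await client.cancel_job(name=NAME)

        # Passing both raises ValueError:
        #   await client.cancel_job(request=request, name=NAME)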
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, storage_batch_operations.CancelJobRequest): - request = storage_batch_operations.CancelJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.cancel_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. 
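-        # Editorial sketch (not part of the generated code): to_grpc_metadata
-        # renders the routing header as an ("x-goog-request-params",
-        # "name=<url-encoded name>") pair, which the service uses to route the
-        # request to the correct backend.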
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
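-        # Editorial sketch (not part of the generated code): _wrapped_methods
-        # maps each transport method to a version pre-wrapped with its default
-        # retry and timeout policies, so retry=gapic_v1.method.DEFAULT resolves
-        # to those defaults at call time.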
- rpc = self.transport._wrapped_methods[self._client._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def __aenter__(self) -> "StorageBatchOperationsAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "StorageBatchOperationsAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/client.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/client.py deleted file mode 100644 index c5b92561bc74..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/client.py +++ /dev/null @@ -1,1530 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.storagebatchoperations_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.cloud.storagebatchoperations_v1.services.storage_batch_operations import pagers -from google.cloud.storagebatchoperations_v1.types import storage_batch_operations -from google.cloud.storagebatchoperations_v1.types import storage_batch_operations_types -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import StorageBatchOperationsTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import StorageBatchOperationsGrpcTransport -from .transports.grpc_asyncio import StorageBatchOperationsGrpcAsyncIOTransport -from .transports.rest import StorageBatchOperationsRestTransport - - -class StorageBatchOperationsClientMeta(type): - """Metaclass for the StorageBatchOperations client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[StorageBatchOperationsTransport]] - _transport_registry["grpc"] = StorageBatchOperationsGrpcTransport - _transport_registry["grpc_asyncio"] = StorageBatchOperationsGrpcAsyncIOTransport - _transport_registry["rest"] = StorageBatchOperationsRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[StorageBatchOperationsTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. 
-        """
-        # If a specific transport is requested, return that one.
-        if label:
-            return cls._transport_registry[label]
-
-        # No transport is requested; return the default (that is, the first one
-        # in the dictionary).
-        return next(iter(cls._transport_registry.values()))
-
-
-class StorageBatchOperationsClient(metaclass=StorageBatchOperationsClientMeta):
-    """Storage Batch Operations offers a managed experience to
-    perform batch operations on millions of Cloud Storage objects in
-    a serverless fashion. With this service, you can automate and
-    simplify large scale API operations performed on Cloud Storage
-    objects.
-    """
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts api endpoint to mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
-    DEFAULT_ENDPOINT = "storagebatchoperations.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    _DEFAULT_ENDPOINT_TEMPLATE = "storagebatchoperations.{UNIVERSE_DOMAIN}"
-    _DEFAULT_UNIVERSE = "googleapis.com"
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            StorageBatchOperationsClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_info(info)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            StorageBatchOperationsClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_file(
-            filename)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    from_service_account_json = from_service_account_file
-
-    @property
-    def transport(self) -> StorageBatchOperationsTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            StorageBatchOperationsTransport: The transport used by the client
-                instance.
-        """
-        return self._transport
-
-    @staticmethod
-    def crypto_key_path(project: str,location: str,key_ring: str,crypto_key: str,) -> str:
-        """Returns a fully-qualified crypto_key string."""
-        return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, )
-
-    @staticmethod
-    def parse_crypto_key_path(path: str) -> Dict[str,str]:
-        """Parses a crypto_key path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/keyRings/(?P<key_ring>.+?)/cryptoKeys/(?P<crypto_key>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def job_path(project: str,location: str,job: str,) -> str:
-        """Returns a fully-qualified job string."""
-        return "projects/{project}/locations/{location}/jobs/{job}".format(project=project, location=location, job=job, )
-
-    @staticmethod
-    def parse_job_path(path: str) -> Dict[str,str]:
-        """Parses a job path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/jobs/(?P<job>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_billing_account_path(billing_account: str, ) -> str:
-        """Returns a fully-qualified billing_account string."""
-        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
-
-    @staticmethod
-    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
-        """Parse a billing_account path into its component segments."""
-        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_folder_path(folder: str, ) -> str:
-        """Returns a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder, )
-
-    @staticmethod
-    def parse_common_folder_path(path: str) -> Dict[str,str]:
-        """Parse a folder path into its component segments."""
-        m = re.match(r"^folders/(?P<folder>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_organization_path(organization: str, ) -> str:
-        """Returns a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization, )
-
-    @staticmethod
-    def parse_common_organization_path(path: str) -> Dict[str,str]:
-        """Parse an organization path into its component segments."""
-        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_project_path(project: str, ) -> str:
-        """Returns a fully-qualified project string."""
-        return "projects/{project}".format(project=project, )
-
-    @staticmethod
-    def parse_common_project_path(path: str) -> Dict[str,str]:
-        """Parse a project path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_location_path(project: str, location: str, ) -> str:
-        """Returns a fully-qualified location string."""
-        return "projects/{project}/locations/{location}".format(project=project, location=location, )
-
-    @staticmethod
-    def parse_common_location_path(path: str) -> Dict[str,str]:
-        """Parse a location path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
-        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint,
-        otherwise use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-
-        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
-            DeprecationWarning)
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-
-        # Figure out the client cert source to use.
-        client_cert_source = None
-        if use_client_cert == "true":
-            if client_options.client_cert_source:
-                client_cert_source = client_options.client_cert_source
-            elif mtls.has_default_client_cert_source():
-                client_cert_source = mtls.default_client_cert_source()
-
-        # Figure out which api endpoint to use.
-        if client_options.api_endpoint is not None:
-            api_endpoint = client_options.api_endpoint
-        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
-            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
-        else:
-            api_endpoint = cls.DEFAULT_ENDPOINT
-
-        return api_endpoint, client_cert_source
-
-    @staticmethod
-    def _read_environment_variables():
-        """Returns the environment variables used by the client.
-
-        Returns:
-            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
-            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
-
-        Raises:
-            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
-                any of ["true", "false"].
-            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
-                is not any of ["auto", "never", "always"].
- """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = StorageBatchOperationsClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = StorageBatchOperationsClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = StorageBatchOperationsClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. 
- """ - universe_domain = StorageBatchOperationsClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, StorageBatchOperationsTransport, Callable[..., StorageBatchOperationsTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the storage batch operations client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,StorageBatchOperationsTransport,Callable[..., StorageBatchOperationsTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the StorageBatchOperationsTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. 
Only if this property is not set and
-                ``transport`` was not explicitly provided, the endpoint is
-                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                variable, which can have one of the following values:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto-switch to the
-                default mTLS endpoint if client certificate is present; this is
-                the default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide a client certificate for mTLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-                3. The ``universe_domain`` property can be used to override the
-                default "googleapis.com" universe. Note that the ``api_endpoint``
-                property still takes precedence; and ``universe_domain`` is
-                currently not supported for mTLS.
-
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client_options = client_options
-        if isinstance(self._client_options, dict):
-            self._client_options = client_options_lib.from_dict(self._client_options)
-        if self._client_options is None:
-            self._client_options = client_options_lib.ClientOptions()
-        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
-
-        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
-
-        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = StorageBatchOperationsClient._read_environment_variables()
-        self._client_cert_source = StorageBatchOperationsClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
-        self._universe_domain = StorageBatchOperationsClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
-        self._api_endpoint = None # updated below, depending on `transport`
-
-        # Initialize the universe domain validation.
-        self._is_universe_domain_valid = False
-
-        if CLIENT_LOGGING_SUPPORTED:  # pragma: NO COVER
-            # Setup logging.
-            client_logging.initialize_logging()
-
-        api_key_value = getattr(self._client_options, "api_key", None)
-        if api_key_value and credentials:
-            raise ValueError("client_options.api_key and credentials are mutually exclusive")
-
-        # Save or instantiate the transport.
-        # Ordinarily, we provide the transport, but allowing a custom transport
-        # instance provides an extensibility point for unusual situations.
-        transport_provided = isinstance(transport, StorageBatchOperationsTransport)
-        if transport_provided:
-            # transport is a StorageBatchOperationsTransport instance.
-            if credentials or self._client_options.credentials_file or api_key_value:
-                raise ValueError("When providing a transport instance, "
-                                 "provide its credentials directly.")
-            if self._client_options.scopes:
-                raise ValueError(
-                    "When providing a transport instance, provide its scopes "
-                    "directly."
- ) - self._transport = cast(StorageBatchOperationsTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - StorageBatchOperationsClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[StorageBatchOperationsTransport], Callable[..., StorageBatchOperationsTransport]] = ( - StorageBatchOperationsClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., StorageBatchOperationsTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient`.", - extra = { - "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "credentialsType": None, - } - ) - - def list_jobs(self, - request: Optional[Union[storage_batch_operations.ListJobsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListJobsPager: - r"""Lists Jobs in a given project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import storagebatchoperations_v1 - - def sample_list_jobs(): - # Create a client - client = storagebatchoperations_v1.StorageBatchOperationsClient() - - # Initialize request argument(s) - request = storagebatchoperations_v1.ListJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.storagebatchoperations_v1.types.ListJobsRequest, dict]): - The request object. 
Message for request to list Jobs - parent (str): - Required. Format: - projects/{project_id}/locations/{location_id} . - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.storagebatchoperations_v1.services.storage_batch_operations.pagers.ListJobsPager: - Message for response to listing Jobs - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, storage_batch_operations.ListJobsRequest): - request = storage_batch_operations.ListJobsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListJobsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_job(self, - request: Optional[Union[storage_batch_operations.GetJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> storage_batch_operations_types.Job: - r"""Gets a batch job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import storagebatchoperations_v1 - - def sample_get_job(): - # Create a client - client = storagebatchoperations_v1.StorageBatchOperationsClient() - - # Initialize request argument(s) - request = storagebatchoperations_v1.GetJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.storagebatchoperations_v1.types.GetJobRequest, dict]): - The request object. Message for getting a Job - name (str): - Required. ``name`` of the job to retrieve. Format: - projects/{project_id}/locations/{location_id}/jobs/{job_id} - . - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.storagebatchoperations_v1.types.Job: - The Storage Batch Operations Job - description. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, storage_batch_operations.GetJobRequest): - request = storage_batch_operations.GetJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_job(self, - request: Optional[Union[storage_batch_operations.CreateJobRequest, dict]] = None, - *, - parent: Optional[str] = None, - job: Optional[storage_batch_operations_types.Job] = None, - job_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Creates a batch job. - - .. 
code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import storagebatchoperations_v1
-
-            def sample_create_job():
-                # Create a client
-                client = storagebatchoperations_v1.StorageBatchOperationsClient()
-
-                # Initialize request argument(s)
-                job = storagebatchoperations_v1.Job()
-                job.bucket_list.buckets.bucket = "bucket_value"
-                job.put_object_hold.temporary_hold = "UNSET"
-                job.put_object_hold.event_based_hold = "UNSET"
-
-                request = storagebatchoperations_v1.CreateJobRequest(
-                    parent="parent_value",
-                    job_id="job_id_value",
-                    job=job,
-                )
-
-                # Make the request
-                operation = client.create_job(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = operation.result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.storagebatchoperations_v1.types.CreateJobRequest, dict]):
-                The request object. Message for creating a Job
-            parent (str):
-                Required. Value for parent.
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            job (google.cloud.storagebatchoperations_v1.types.Job):
-                Required. The resource being created
-                This corresponds to the ``job`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            job_id (str):
-                Required. The ``job_id`` for this Job. If not specified, an id
-                is generated. ``job_id`` should be no more than 128 characters
-                and must include only characters available in DNS names, as
-                defined by RFC-1123.
-
-                This corresponds to the ``job_id`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation.Operation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be
-                :class:`google.cloud.storagebatchoperations_v1.types.Job`
-                The Storage Batch Operations Job description.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        flattened_params = [parent, job, job_id]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        # there are no flattened fields), or create one.
- if not isinstance(request, storage_batch_operations.CreateJobRequest): - request = storage_batch_operations.CreateJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if job is not None: - request.job = job - if job_id is not None: - request.job_id = job_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - storage_batch_operations_types.Job, - metadata_type=storage_batch_operations.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_job(self, - request: Optional[Union[storage_batch_operations.DeleteJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a batch job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import storagebatchoperations_v1 - - def sample_delete_job(): - # Create a client - client = storagebatchoperations_v1.StorageBatchOperationsClient() - - # Initialize request argument(s) - request = storagebatchoperations_v1.DeleteJobRequest( - name="name_value", - ) - - # Make the request - client.delete_job(request=request) - - Args: - request (Union[google.cloud.storagebatchoperations_v1.types.DeleteJobRequest, dict]): - The request object. Message for deleting a Job - name (str): - Required. The ``name`` of the job to delete. Format: - projects/{project_id}/locations/{location_id}/jobs/{job_id} - . - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
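-        # Editorial sketch (not part of the generated code): the check below is
-        # equivalent to
-        #
-        #     has_flattened_params = any(p is not None for p in flattened_params)
-        #
-        # so delete_job(request=req, name="...") raises, while either argument
-        # alone is accepted.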
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, storage_batch_operations.DeleteJobRequest): - request = storage_batch_operations.DeleteJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def cancel_job(self, - request: Optional[Union[storage_batch_operations.CancelJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> storage_batch_operations.CancelJobResponse: - r"""Cancels a batch job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import storagebatchoperations_v1 - - def sample_cancel_job(): - # Create a client - client = storagebatchoperations_v1.StorageBatchOperationsClient() - - # Initialize request argument(s) - request = storagebatchoperations_v1.CancelJobRequest( - name="name_value", - ) - - # Make the request - response = client.cancel_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.storagebatchoperations_v1.types.CancelJobRequest, dict]): - The request object. Message for Job to Cancel - name (str): - Required. The ``name`` of the job to cancel. Format: - projects/{project_id}/locations/{location_id}/jobs/{job_id}. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.storagebatchoperations_v1.types.CancelJobResponse: - Message for response to cancel Job. - """ - # Create or coerce a protobuf request object. 
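-        # Editorial sketch (not part of the generated code): because the request
-        # parameter is typed Union[CancelJobRequest, dict], a plain dict is also
-        # accepted and coerced by the proto-plus constructor below, e.g.
-        #
-        #     client.cancel_job(request={"name": "projects/p/locations/l/jobs/j"})
-        #
-        # ("p", "l", and "j" are hypothetical placeholders).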
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, storage_batch_operations.CancelJobRequest): - request = storage_batch_operations.CancelJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "StorageBatchOperationsClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. 
- self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. 
- # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "StorageBatchOperationsClient", -) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/pagers.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/pagers.py deleted file mode 100644 index 3dea7c30fb00..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/pagers.py +++ /dev/null @@ -1,167 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.storagebatchoperations_v1.types import storage_batch_operations -from google.cloud.storagebatchoperations_v1.types import storage_batch_operations_types - - -class ListJobsPager: - """A pager for iterating through ``list_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.storagebatchoperations_v1.types.ListJobsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.storagebatchoperations_v1.types.ListJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., storage_batch_operations.ListJobsResponse], - request: storage_batch_operations.ListJobsRequest, - response: storage_batch_operations.ListJobsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.storagebatchoperations_v1.types.ListJobsRequest): - The initial request object. 
- response (google.cloud.storagebatchoperations_v1.types.ListJobsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = storage_batch_operations.ListJobsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[storage_batch_operations.ListJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[storage_batch_operations_types.Job]: - for page in self.pages: - yield from page.jobs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobsAsyncPager: - """A pager for iterating through ``list_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.storagebatchoperations_v1.types.ListJobsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.storagebatchoperations_v1.types.ListJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[storage_batch_operations.ListJobsResponse]], - request: storage_batch_operations.ListJobsRequest, - response: storage_batch_operations.ListJobsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.storagebatchoperations_v1.types.ListJobsRequest): - The initial request object. - response (google.cloud.storagebatchoperations_v1.types.ListJobsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
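-
-        Example (illustrative only; the resource name is hypothetical):
-
-        .. code-block:: python
-
-            pager = await client.list_jobs(
-                request={"parent": "projects/my-project/locations/us-central1"}
-            )
-            async for job in pager:
-                print(job.name)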
- """ - self._method = method - self._request = storage_batch_operations.ListJobsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[storage_batch_operations.ListJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[storage_batch_operations_types.Job]: - async def async_generator(): - async for page in self.pages: - for response in page.jobs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/README.rst b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/README.rst deleted file mode 100644 index 2cc2d87b2b31..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`StorageBatchOperationsTransport` is the ABC for all transports. -- public child `StorageBatchOperationsGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `StorageBatchOperationsGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseStorageBatchOperationsRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `StorageBatchOperationsRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/__init__.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/__init__.py deleted file mode 100644 index f4838e6e09e6..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import StorageBatchOperationsTransport -from .grpc import StorageBatchOperationsGrpcTransport -from .grpc_asyncio import StorageBatchOperationsGrpcAsyncIOTransport -from .rest import StorageBatchOperationsRestTransport -from .rest import StorageBatchOperationsRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[StorageBatchOperationsTransport]] -_transport_registry['grpc'] = StorageBatchOperationsGrpcTransport -_transport_registry['grpc_asyncio'] = StorageBatchOperationsGrpcAsyncIOTransport -_transport_registry['rest'] = StorageBatchOperationsRestTransport - -__all__ = ( - 'StorageBatchOperationsTransport', - 'StorageBatchOperationsGrpcTransport', - 'StorageBatchOperationsGrpcAsyncIOTransport', - 'StorageBatchOperationsRestTransport', - 'StorageBatchOperationsRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/base.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/base.py deleted file mode 100644 index cfdcc1ca5dd4..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/base.py +++ /dev/null @@ -1,330 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
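-# Illustrative note, not part of the generated module: this file defines the
-# abstract transport that the gRPC, gRPC AsyncIO, and REST transports subclass.
-# The default policy wired up in ``_prep_wrapped_messages`` below retries only
-# ``ServiceUnavailable`` with exponential backoff, roughly (sketch):
-#
-#   retries.Retry(initial=1.0, maximum=60.0, multiplier=2,
-#                 predicate=retries.if_exception_type(
-#                     core_exceptions.ServiceUnavailable),
-#                 deadline=60.0)
-#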
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.storagebatchoperations_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.location import locations_pb2 # type: ignore -from google.cloud.storagebatchoperations_v1.types import storage_batch_operations -from google.cloud.storagebatchoperations_v1.types import storage_batch_operations_types -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class StorageBatchOperationsTransport(abc.ABC): - """Abstract transport class for StorageBatchOperations.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'storagebatchoperations.googleapis.com' - - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'storagebatchoperations.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. 
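-        # Illustrative note (sketch, not generated logic): ``credentials`` and
-        # ``credentials_file`` are mutually exclusive; a file is loaded via
-        # google.auth.load_credentials_from_file, and if neither is given,
-        # application default credentials are used. For example, with a
-        # hypothetical key file:
-        #
-        #   transport = StorageBatchOperationsGrpcTransport(
-        #       credentials_file="key.json",  # hypothetical path
-        #   )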
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.list_jobs: gapic_v1.method.wrap_method( - self.list_jobs, - default_retry=retries.Retry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_job: gapic_v1.method.wrap_method( - self.get_job, - default_retry=retries.Retry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_job: gapic_v1.method.wrap_method( - self.create_job, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_job: gapic_v1.method.wrap_method( - self.delete_job, - default_timeout=60.0, - client_info=client_info, - ), - self.cancel_job: gapic_v1.method.wrap_method( - self.cancel_job, - default_retry=retries.Retry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_location: gapic_v1.method.wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: gapic_v1.method.wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: gapic_v1.method.wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: gapic_v1.method.wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
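-
-        For example (illustrative only):
-
-        .. code-block:: python
-
-            client.transport.close()  # safe only if no other client shares it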
- """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def list_jobs(self) -> Callable[ - [storage_batch_operations.ListJobsRequest], - Union[ - storage_batch_operations.ListJobsResponse, - Awaitable[storage_batch_operations.ListJobsResponse] - ]]: - raise NotImplementedError() - - @property - def get_job(self) -> Callable[ - [storage_batch_operations.GetJobRequest], - Union[ - storage_batch_operations_types.Job, - Awaitable[storage_batch_operations_types.Job] - ]]: - raise NotImplementedError() - - @property - def create_job(self) -> Callable[ - [storage_batch_operations.CreateJobRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_job(self) -> Callable[ - [storage_batch_operations.DeleteJobRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def cancel_job(self) -> Callable[ - [storage_batch_operations.CancelJobRequest], - Union[ - storage_batch_operations.CancelJobResponse, - Awaitable[storage_batch_operations.CancelJobResponse] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: - raise NotImplementedError() - - @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'StorageBatchOperationsTransport', -) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/grpc.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/grpc.py deleted file mode 100644 index f0a5c055f028..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/grpc.py +++ /dev/null @@ -1,584 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import json
-import logging as std_logging
-import pickle
-import warnings
-from typing import Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import grpc_helpers
-from google.api_core import operations_v1
-from google.api_core import gapic_v1
-import google.auth  # type: ignore
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-from google.protobuf.json_format import MessageToJson
-import google.protobuf.message
-
-import grpc  # type: ignore
-import proto  # type: ignore
-
-from google.cloud.location import locations_pb2  # type: ignore
-from google.cloud.storagebatchoperations_v1.types import storage_batch_operations
-from google.cloud.storagebatchoperations_v1.types import storage_batch_operations_types
-from google.longrunning import operations_pb2  # type: ignore
-from google.protobuf import empty_pb2  # type: ignore
-from .base import StorageBatchOperationsTransport, DEFAULT_CLIENT_INFO
-
-try:
-    from google.api_core import client_logging  # type: ignore
-    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-
-class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor):  # pragma: NO COVER
-    def intercept_unary_unary(self, continuation, client_call_details, request):
-        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
-        if logging_enabled:  # pragma: NO COVER
-            request_metadata = client_call_details.metadata
-            if isinstance(request, proto.Message):
-                request_payload = type(request).to_json(request)
-            elif isinstance(request, google.protobuf.message.Message):
-                request_payload = MessageToJson(request)
-            else:
-                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
-
-            request_metadata = {
-                key: value.decode("utf-8") if isinstance(value, bytes) else value
-                for key, value in request_metadata
-            }
-            grpc_request = {
-                "payload": request_payload,
-                "requestMethod": "grpc",
-                "metadata": dict(request_metadata),
-            }
-            _LOGGER.debug(
-                f"Sending request for {client_call_details.method}",
-                extra={
-                    "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations",
-                    "rpcName": str(client_call_details.method),
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-        response = continuation(client_call_details, request)
-        if logging_enabled:  # pragma: NO COVER
-            response_metadata = response.trailing_metadata()
-            # Convert the gRPC response metadata to a plain dict of strings.
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = response.result()
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class StorageBatchOperationsGrpcTransport(StorageBatchOperationsTransport): - """gRPC backend transport for StorageBatchOperations. - - Storage Batch Operations offers a managed experience to - perform batch operations on millions of Cloud Storage objects in - a serverless fashion. With this service, you can automate and - simplify large scale API operations performed on Cloud Storage - objects. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'storagebatchoperations.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'storagebatchoperations.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. 
It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. 
This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'storagebatchoperations.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_jobs(self) -> Callable[ - [storage_batch_operations.ListJobsRequest], - storage_batch_operations.ListJobsResponse]: - r"""Return a callable for the list jobs method over gRPC. - - Lists Jobs in a given project and location. - - Returns: - Callable[[~.ListJobsRequest], - ~.ListJobsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
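-        # Illustrative note, not part of the generated code: stubs are created
-        # lazily and cached in ``self._stubs``, so repeated property reads
-        # reuse the same bound callable, e.g.:
-        #
-        #   rpc = transport.list_jobs          # creates and caches the stub
-        #   assert rpc is transport.list_jobs  # second read hits the cache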
- if 'list_jobs' not in self._stubs: - self._stubs['list_jobs'] = self._logged_channel.unary_unary( - '/google.cloud.storagebatchoperations.v1.StorageBatchOperations/ListJobs', - request_serializer=storage_batch_operations.ListJobsRequest.serialize, - response_deserializer=storage_batch_operations.ListJobsResponse.deserialize, - ) - return self._stubs['list_jobs'] - - @property - def get_job(self) -> Callable[ - [storage_batch_operations.GetJobRequest], - storage_batch_operations_types.Job]: - r"""Return a callable for the get job method over gRPC. - - Gets a batch job. - - Returns: - Callable[[~.GetJobRequest], - ~.Job]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job' not in self._stubs: - self._stubs['get_job'] = self._logged_channel.unary_unary( - '/google.cloud.storagebatchoperations.v1.StorageBatchOperations/GetJob', - request_serializer=storage_batch_operations.GetJobRequest.serialize, - response_deserializer=storage_batch_operations_types.Job.deserialize, - ) - return self._stubs['get_job'] - - @property - def create_job(self) -> Callable[ - [storage_batch_operations.CreateJobRequest], - operations_pb2.Operation]: - r"""Return a callable for the create job method over gRPC. - - Creates a batch job. - - Returns: - Callable[[~.CreateJobRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_job' not in self._stubs: - self._stubs['create_job'] = self._logged_channel.unary_unary( - '/google.cloud.storagebatchoperations.v1.StorageBatchOperations/CreateJob', - request_serializer=storage_batch_operations.CreateJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_job'] - - @property - def delete_job(self) -> Callable[ - [storage_batch_operations.DeleteJobRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete job method over gRPC. - - Deletes a batch job. - - Returns: - Callable[[~.DeleteJobRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job' not in self._stubs: - self._stubs['delete_job'] = self._logged_channel.unary_unary( - '/google.cloud.storagebatchoperations.v1.StorageBatchOperations/DeleteJob', - request_serializer=storage_batch_operations.DeleteJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_job'] - - @property - def cancel_job(self) -> Callable[ - [storage_batch_operations.CancelJobRequest], - storage_batch_operations.CancelJobResponse]: - r"""Return a callable for the cancel job method over gRPC. - - Cancels a batch job. - - Returns: - Callable[[~.CancelJobRequest], - ~.CancelJobResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'cancel_job' not in self._stubs: - self._stubs['cancel_job'] = self._logged_channel.unary_unary( - '/google.cloud.storagebatchoperations.v1.StorageBatchOperations/CancelJob', - request_serializer=storage_batch_operations.CancelJobRequest.serialize, - response_deserializer=storage_batch_operations.CancelJobResponse.deserialize, - ) - return self._stubs['cancel_job'] - - def close(self): - self._logged_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'StorageBatchOperationsGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/grpc_asyncio.py deleted file mode 100644 index 78842b6f6356..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/grpc_asyncio.py +++ /dev/null @@ -1,682 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-import inspect
-import json
-import pickle
-import logging as std_logging
-import warnings
-from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers_async
-from google.api_core import exceptions as core_exceptions
-from google.api_core import retry_async as retries
-from google.api_core import operations_v1
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-from google.protobuf.json_format import MessageToJson
-import google.protobuf.message
-
-import grpc  # type: ignore
-import proto  # type: ignore
-from grpc.experimental import aio  # type: ignore
-
-from google.cloud.location import locations_pb2  # type: ignore
-from google.cloud.storagebatchoperations_v1.types import storage_batch_operations
-from google.cloud.storagebatchoperations_v1.types import storage_batch_operations_types
-from google.longrunning import operations_pb2  # type: ignore
-from google.protobuf import empty_pb2  # type: ignore
-from .base import StorageBatchOperationsTransport, DEFAULT_CLIENT_INFO
-from .grpc import StorageBatchOperationsGrpcTransport
-
-try:
-    from google.api_core import client_logging  # type: ignore
-    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-
-class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor):  # pragma: NO COVER
-    async def intercept_unary_unary(self, continuation, client_call_details, request):
-        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
-        if logging_enabled:  # pragma: NO COVER
-            request_metadata = client_call_details.metadata
-            if isinstance(request, proto.Message):
-                request_payload = type(request).to_json(request)
-            elif isinstance(request, google.protobuf.message.Message):
-                request_payload = MessageToJson(request)
-            else:
-                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
-
-            request_metadata = {
-                key: value.decode("utf-8") if isinstance(value, bytes) else value
-                for key, value in request_metadata
-            }
-            grpc_request = {
-                "payload": request_payload,
-                "requestMethod": "grpc",
-                "metadata": dict(request_metadata),
-            }
-            _LOGGER.debug(
-                f"Sending request for {client_call_details.method}",
-                extra={
-                    "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations",
-                    "rpcName": str(client_call_details.method),
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-        response = await continuation(client_call_details, request)
-        if logging_enabled:  # pragma: NO COVER
-            response_metadata = await response.trailing_metadata()
-            # Convert the gRPC response metadata to a plain dict of strings.
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = await response
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
-                f"Received response to rpc {client_call_details.method}.",
-                extra={
-                    "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations",
-                    "rpcName":
str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class StorageBatchOperationsGrpcAsyncIOTransport(StorageBatchOperationsTransport): - """gRPC AsyncIO backend transport for StorageBatchOperations. - - Storage Batch Operations offers a managed experience to - perform batch operations on millions of Cloud Storage objects in - a serverless fashion. With this service, you can automate and - simplify large scale API operations performed on Cloud Storage - objects. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'storagebatchoperations.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'storagebatchoperations.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'storagebatchoperations.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_jobs(self) -> Callable[ - [storage_batch_operations.ListJobsRequest], - Awaitable[storage_batch_operations.ListJobsResponse]]: - r"""Return a callable for the list jobs method over gRPC. - - Lists Jobs in a given project and location. - - Returns: - Callable[[~.ListJobsRequest], - Awaitable[~.ListJobsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
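
Before the per-RPC properties that follow (starting with the `list_jobs` stub), a hedged construction sketch: `create_channel` above forwards extra keyword arguments to `grpc_helpers_async.create_channel`, so a pre-built channel can be handed straight to the transport. The import path assumes this package's usual GAPIC layout, and the project id is illustrative.

    from google.cloud.storagebatchoperations_v1.services.storage_batch_operations.transports import (
        StorageBatchOperationsGrpcAsyncIOTransport,
    )

    # Build the channel once, then hand it to the transport; __init__ skips
    # channel creation when an aio.Channel instance is supplied.
    channel = StorageBatchOperationsGrpcAsyncIOTransport.create_channel(
        "storagebatchoperations.googleapis.com",
        quota_project_id="example-project",  # assumption: illustrative project id
    )
    transport = StorageBatchOperationsGrpcAsyncIOTransport(channel=channel)
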
- if 'list_jobs' not in self._stubs: - self._stubs['list_jobs'] = self._logged_channel.unary_unary( - '/google.cloud.storagebatchoperations.v1.StorageBatchOperations/ListJobs', - request_serializer=storage_batch_operations.ListJobsRequest.serialize, - response_deserializer=storage_batch_operations.ListJobsResponse.deserialize, - ) - return self._stubs['list_jobs'] - - @property - def get_job(self) -> Callable[ - [storage_batch_operations.GetJobRequest], - Awaitable[storage_batch_operations_types.Job]]: - r"""Return a callable for the get job method over gRPC. - - Gets a batch job. - - Returns: - Callable[[~.GetJobRequest], - Awaitable[~.Job]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job' not in self._stubs: - self._stubs['get_job'] = self._logged_channel.unary_unary( - '/google.cloud.storagebatchoperations.v1.StorageBatchOperations/GetJob', - request_serializer=storage_batch_operations.GetJobRequest.serialize, - response_deserializer=storage_batch_operations_types.Job.deserialize, - ) - return self._stubs['get_job'] - - @property - def create_job(self) -> Callable[ - [storage_batch_operations.CreateJobRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create job method over gRPC. - - Creates a batch job. - - Returns: - Callable[[~.CreateJobRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_job' not in self._stubs: - self._stubs['create_job'] = self._logged_channel.unary_unary( - '/google.cloud.storagebatchoperations.v1.StorageBatchOperations/CreateJob', - request_serializer=storage_batch_operations.CreateJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_job'] - - @property - def delete_job(self) -> Callable[ - [storage_batch_operations.DeleteJobRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete job method over gRPC. - - Deletes a batch job. - - Returns: - Callable[[~.DeleteJobRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job' not in self._stubs: - self._stubs['delete_job'] = self._logged_channel.unary_unary( - '/google.cloud.storagebatchoperations.v1.StorageBatchOperations/DeleteJob', - request_serializer=storage_batch_operations.DeleteJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_job'] - - @property - def cancel_job(self) -> Callable[ - [storage_batch_operations.CancelJobRequest], - Awaitable[storage_batch_operations.CancelJobResponse]]: - r"""Return a callable for the cancel job method over gRPC. - - Cancels a batch job. - - Returns: - Callable[[~.CancelJobRequest], - Awaitable[~.CancelJobResponse]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'cancel_job' not in self._stubs: - self._stubs['cancel_job'] = self._logged_channel.unary_unary( - '/google.cloud.storagebatchoperations.v1.StorageBatchOperations/CancelJob', - request_serializer=storage_batch_operations.CancelJobRequest.serialize, - response_deserializer=storage_batch_operations.CancelJobResponse.deserialize, - ) - return self._stubs['cancel_job'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.list_jobs: self._wrap_method( - self.list_jobs, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_job: self._wrap_method( - self.get_job, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_job: self._wrap_method( - self.create_job, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_job: self._wrap_method( - self.delete_job, - default_timeout=60.0, - client_info=client_info, - ), - self.cancel_job: self._wrap_method( - self.cancel_job, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_location: self._wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: self._wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: self._wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: self._wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - -__all__ = ( - 'StorageBatchOperationsGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/rest.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/rest.py deleted file mode 100644 index 65fda1fe21dd..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/rest.py +++ /dev/null @@ -1,1819 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.api_core import operations_v1 -from google.cloud.location import locations_pb2 # type: ignore - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.storagebatchoperations_v1.types import storage_batch_operations -from google.cloud.storagebatchoperations_v1.types import storage_batch_operations_types -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseStorageBatchOperationsRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - - -class StorageBatchOperationsRestInterceptor: - """Interceptor for StorageBatchOperations. 
- - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the StorageBatchOperationsRestTransport. - - .. code-block:: python - class MyCustomStorageBatchOperationsInterceptor(StorageBatchOperationsRestInterceptor): - def pre_cancel_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_cancel_job(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_job(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_get_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_job(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_jobs(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_jobs(self, response): - logging.log(f"Received response: {response}") - return response - - transport = StorageBatchOperationsRestTransport(interceptor=MyCustomStorageBatchOperationsInterceptor()) - client = StorageBatchOperationsClient(transport=transport) - - - """ - def pre_cancel_job(self, request: storage_batch_operations.CancelJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[storage_batch_operations.CancelJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for cancel_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the StorageBatchOperations server. - """ - return request, metadata - - def post_cancel_job(self, response: storage_batch_operations.CancelJobResponse) -> storage_batch_operations.CancelJobResponse: - """Post-rpc interceptor for cancel_job - - DEPRECATED. Please use the `post_cancel_job_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the StorageBatchOperations server but before - it is returned to user code. This `post_cancel_job` interceptor runs - before the `post_cancel_job_with_metadata` interceptor. - """ - return response - - def post_cancel_job_with_metadata(self, response: storage_batch_operations.CancelJobResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[storage_batch_operations.CancelJobResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for cancel_job - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the StorageBatchOperations server but before it is returned to user code. - - We recommend only using this `post_cancel_job_with_metadata` - interceptor in new development instead of the `post_cancel_job` interceptor. - When both interceptors are used, this `post_cancel_job_with_metadata` interceptor runs after the - `post_cancel_job` interceptor. 
The (possibly modified) response returned by - `post_cancel_job` will be passed to - `post_cancel_job_with_metadata`. - """ - return response, metadata - - def pre_create_job(self, request: storage_batch_operations.CreateJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[storage_batch_operations.CreateJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the StorageBatchOperations server. - """ - return request, metadata - - def post_create_job(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_job - - DEPRECATED. Please use the `post_create_job_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the StorageBatchOperations server but before - it is returned to user code. This `post_create_job` interceptor runs - before the `post_create_job_with_metadata` interceptor. - """ - return response - - def post_create_job_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_job - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the StorageBatchOperations server but before it is returned to user code. - - We recommend only using this `post_create_job_with_metadata` - interceptor in new development instead of the `post_create_job` interceptor. - When both interceptors are used, this `post_create_job_with_metadata` interceptor runs after the - `post_create_job` interceptor. The (possibly modified) response returned by - `post_create_job` will be passed to - `post_create_job_with_metadata`. - """ - return response, metadata - - def pre_delete_job(self, request: storage_batch_operations.DeleteJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[storage_batch_operations.DeleteJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the StorageBatchOperations server. - """ - return request, metadata - - def pre_get_job(self, request: storage_batch_operations.GetJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[storage_batch_operations.GetJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the StorageBatchOperations server. - """ - return request, metadata - - def post_get_job(self, response: storage_batch_operations_types.Job) -> storage_batch_operations_types.Job: - """Post-rpc interceptor for get_job - - DEPRECATED. Please use the `post_get_job_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the StorageBatchOperations server but before - it is returned to user code. This `post_get_job` interceptor runs - before the `post_get_job_with_metadata` interceptor. 
- """ - return response - - def post_get_job_with_metadata(self, response: storage_batch_operations_types.Job, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[storage_batch_operations_types.Job, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_job - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the StorageBatchOperations server but before it is returned to user code. - - We recommend only using this `post_get_job_with_metadata` - interceptor in new development instead of the `post_get_job` interceptor. - When both interceptors are used, this `post_get_job_with_metadata` interceptor runs after the - `post_get_job` interceptor. The (possibly modified) response returned by - `post_get_job` will be passed to - `post_get_job_with_metadata`. - """ - return response, metadata - - def pre_list_jobs(self, request: storage_batch_operations.ListJobsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[storage_batch_operations.ListJobsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_jobs - - Override in a subclass to manipulate the request or metadata - before they are sent to the StorageBatchOperations server. - """ - return request, metadata - - def post_list_jobs(self, response: storage_batch_operations.ListJobsResponse) -> storage_batch_operations.ListJobsResponse: - """Post-rpc interceptor for list_jobs - - DEPRECATED. Please use the `post_list_jobs_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the StorageBatchOperations server but before - it is returned to user code. This `post_list_jobs` interceptor runs - before the `post_list_jobs_with_metadata` interceptor. - """ - return response - - def post_list_jobs_with_metadata(self, response: storage_batch_operations.ListJobsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[storage_batch_operations.ListJobsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_jobs - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the StorageBatchOperations server but before it is returned to user code. - - We recommend only using this `post_list_jobs_with_metadata` - interceptor in new development instead of the `post_list_jobs` interceptor. - When both interceptors are used, this `post_list_jobs_with_metadata` interceptor runs after the - `post_list_jobs` interceptor. The (possibly modified) response returned by - `post_list_jobs` will be passed to - `post_list_jobs_with_metadata`. - """ - return response, metadata - - def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_location - - Override in a subclass to manipulate the request or metadata - before they are sent to the StorageBatchOperations server. - """ - return request, metadata - - def post_get_location( - self, response: locations_pb2.Location - ) -> locations_pb2.Location: - """Post-rpc interceptor for get_location - - Override in a subclass to manipulate the response - after it is returned by the StorageBatchOperations server but before - it is returned to user code. 
- """ - return response - - def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_locations - - Override in a subclass to manipulate the request or metadata - before they are sent to the StorageBatchOperations server. - """ - return request, metadata - - def post_list_locations( - self, response: locations_pb2.ListLocationsResponse - ) -> locations_pb2.ListLocationsResponse: - """Post-rpc interceptor for list_locations - - Override in a subclass to manipulate the response - after it is returned by the StorageBatchOperations server but before - it is returned to user code. - """ - return response - - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the StorageBatchOperations server. - """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the StorageBatchOperations server but before - it is returned to user code. - """ - return response - - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the StorageBatchOperations server. - """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the StorageBatchOperations server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the StorageBatchOperations server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the StorageBatchOperations server but before - it is returned to user code. - """ - return response - - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the StorageBatchOperations server. 
- """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the StorageBatchOperations server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class StorageBatchOperationsRestStub: - _session: AuthorizedSession - _host: str - _interceptor: StorageBatchOperationsRestInterceptor - - -class StorageBatchOperationsRestTransport(_BaseStorageBatchOperationsRestTransport): - """REST backend synchronous transport for StorageBatchOperations. - - Storage Batch Operations offers a managed experience to - perform batch operations on millions of Cloud Storage objects in - a serverless fashion. With this service, you can automate and - simplify large scale API operations performed on Cloud Storage - objects. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'storagebatchoperations.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[StorageBatchOperationsRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'storagebatchoperations.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or StorageBatchOperationsRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. - if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ], - 'google.longrunning.Operations.DeleteOperation': [ - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.ListOperations': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. - return self._operations_client - - class _CancelJob(_BaseStorageBatchOperationsRestTransport._BaseCancelJob, StorageBatchOperationsRestStub): - def __hash__(self): - return hash("StorageBatchOperationsRestTransport.CancelJob") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: storage_batch_operations.CancelJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> storage_batch_operations.CancelJobResponse: - r"""Call the cancel job method over HTTP. - - Args: - request (~.storage_batch_operations.CancelJobRequest): - The request object. Message for Job to Cancel - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.storage_batch_operations.CancelJobResponse: - Message for response to cancel Job. - """ - - http_options = _BaseStorageBatchOperationsRestTransport._BaseCancelJob._get_http_options() - - request, metadata = self._interceptor.pre_cancel_job(request, metadata) - transcoded_request = _BaseStorageBatchOperationsRestTransport._BaseCancelJob._get_transcoded_request(http_options, request) - - body = _BaseStorageBatchOperationsRestTransport._BaseCancelJob._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseStorageBatchOperationsRestTransport._BaseCancelJob._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.CancelJob", - extra = { - "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "rpcName": "CancelJob", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = StorageBatchOperationsRestTransport._CancelJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
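
Per the comment above, the status check that follows raises the matching `GoogleAPICallError` subclass through `core_exceptions.from_http_response`; a caller-side handling sketch (the client instance and the job name format are assumptions):

    from google.api_core import exceptions as core_exceptions

    def cancel_job_safely(client, job_name):
        # job_name is assumed to look like
        # "projects/{project}/locations/{location}/jobs/{job}".
        try:
            return client.cancel_job(name=job_name)
        except core_exceptions.NotFound:
            return None  # the job no longer exists
        except core_exceptions.GoogleAPICallError as exc:
            raise RuntimeError(f"CancelJob failed: {exc.code} {exc.message}") from exc
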
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = storage_batch_operations.CancelJobResponse() - pb_resp = storage_batch_operations.CancelJobResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_cancel_job(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_cancel_job_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = storage_batch_operations.CancelJobResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.cancel_job", - extra = { - "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "rpcName": "CancelJob", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateJob(_BaseStorageBatchOperationsRestTransport._BaseCreateJob, StorageBatchOperationsRestStub): - def __hash__(self): - return hash("StorageBatchOperationsRestTransport.CreateJob") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: storage_batch_operations.CreateJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create job method over HTTP. - - Args: - request (~.storage_batch_operations.CreateJobRequest): - The request object. Message for creating a Job - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseStorageBatchOperationsRestTransport._BaseCreateJob._get_http_options() - - request, metadata = self._interceptor.pre_create_job(request, metadata) - transcoded_request = _BaseStorageBatchOperationsRestTransport._BaseCreateJob._get_transcoded_request(http_options, request) - - body = _BaseStorageBatchOperationsRestTransport._BaseCreateJob._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseStorageBatchOperationsRestTransport._BaseCreateJob._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.CreateJob", - extra = { - "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "rpcName": "CreateJob", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = StorageBatchOperationsRestTransport._CreateJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_job(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_job_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.create_job", - extra = { - "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "rpcName": "CreateJob", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteJob(_BaseStorageBatchOperationsRestTransport._BaseDeleteJob, StorageBatchOperationsRestStub): - def __hash__(self): - return hash("StorageBatchOperationsRestTransport.DeleteJob") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: storage_batch_operations.DeleteJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: 
Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete job method over HTTP. - - Args: - request (~.storage_batch_operations.DeleteJobRequest): - The request object. Message for deleting a Job - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseStorageBatchOperationsRestTransport._BaseDeleteJob._get_http_options() - - request, metadata = self._interceptor.pre_delete_job(request, metadata) - transcoded_request = _BaseStorageBatchOperationsRestTransport._BaseDeleteJob._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseStorageBatchOperationsRestTransport._BaseDeleteJob._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.DeleteJob", - extra = { - "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "rpcName": "DeleteJob", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = StorageBatchOperationsRestTransport._DeleteJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _GetJob(_BaseStorageBatchOperationsRestTransport._BaseGetJob, StorageBatchOperationsRestStub): - def __hash__(self): - return hash("StorageBatchOperationsRestTransport.GetJob") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: storage_batch_operations.GetJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> storage_batch_operations_types.Job: - r"""Call the get job method over HTTP. - - Args: - request (~.storage_batch_operations.GetJobRequest): - The request object. Message for getting a Job - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.storage_batch_operations_types.Job: - The Storage Batch Operations Job - description. - - """ - - http_options = _BaseStorageBatchOperationsRestTransport._BaseGetJob._get_http_options() - - request, metadata = self._interceptor.pre_get_job(request, metadata) - transcoded_request = _BaseStorageBatchOperationsRestTransport._BaseGetJob._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseStorageBatchOperationsRestTransport._BaseGetJob._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.GetJob", - extra = { - "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "rpcName": "GetJob", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = StorageBatchOperationsRestTransport._GetJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = storage_batch_operations_types.Job() - pb_resp = storage_batch_operations_types.Job.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_job(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_job_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = storage_batch_operations_types.Job.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.get_job", - extra = { - "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "rpcName": "GetJob", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListJobs(_BaseStorageBatchOperationsRestTransport._BaseListJobs, StorageBatchOperationsRestStub): - def __hash__(self): - return hash("StorageBatchOperationsRestTransport.ListJobs") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: storage_batch_operations.ListJobsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> storage_batch_operations.ListJobsResponse: - r"""Call the list jobs method over HTTP. - - Args: - request (~.storage_batch_operations.ListJobsRequest): - The request object. Message for request to list Jobs - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
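The GetJob handler above deserializes the response body by parsing into the message's protobuf view: ``Job.pb(resp)`` returns the underlying protocol buffer, so ``json_format.Parse`` fills ``resp`` in place. The same pattern, sketched against a hand-written payload (the job name follows the documented format; the payload itself is invented):

from google.protobuf import json_format

from google.cloud.storagebatchoperations_v1.types import storage_batch_operations_types

payload = '{"name": "projects/123456/locations/global/jobs/job01"}'
resp = storage_batch_operations_types.Job()
json_format.Parse(payload, storage_batch_operations_types.Job.pb(resp), ignore_unknown_fields=True)
assert resp.name == 'projects/123456/locations/global/jobs/job01'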
- - Returns: - ~.storage_batch_operations.ListJobsResponse: - Message for response to listing Jobs - """ - - http_options = _BaseStorageBatchOperationsRestTransport._BaseListJobs._get_http_options() - - request, metadata = self._interceptor.pre_list_jobs(request, metadata) - transcoded_request = _BaseStorageBatchOperationsRestTransport._BaseListJobs._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseStorageBatchOperationsRestTransport._BaseListJobs._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.ListJobs", - extra = { - "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "rpcName": "ListJobs", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = StorageBatchOperationsRestTransport._ListJobs._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = storage_batch_operations.ListJobsResponse() - pb_resp = storage_batch_operations.ListJobsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_jobs(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_jobs_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = storage_batch_operations.ListJobsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.list_jobs", - extra = { - "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "rpcName": "ListJobs", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def cancel_job(self) -> Callable[ - [storage_batch_operations.CancelJobRequest], - storage_batch_operations.CancelJobResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CancelJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_job(self) -> Callable[ - [storage_batch_operations.CreateJobRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
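The ListJobs stub above returns a single page plus a ``next_page_token``; nothing at the transport level loops for you (the client layer typically wraps this in a pager). A sketch of manual pagination, assuming ``list_jobs`` is the callable exposed by the property of the same name:

from google.cloud.storagebatchoperations_v1.types import storage_batch_operations

def iter_all_jobs(list_jobs, parent):
    # Follow next_page_token until the service returns an empty token.
    page_token = ''
    while True:
        response = list_jobs(storage_batch_operations.ListJobsRequest(parent=parent, page_token=page_token))
        yield from response.jobs
        page_token = response.next_page_token
        if not page_token:
            break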
- # In C++ this would require a dynamic_cast - return self._CreateJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_job(self) -> Callable[ - [storage_batch_operations.DeleteJobRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_job(self) -> Callable[ - [storage_batch_operations.GetJobRequest], - storage_batch_operations_types.Job]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_jobs(self) -> Callable[ - [storage_batch_operations.ListJobsRequest], - storage_batch_operations.ListJobsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListJobs(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - - class _GetLocation(_BaseStorageBatchOperationsRestTransport._BaseGetLocation, StorageBatchOperationsRestStub): - def __hash__(self): - return hash("StorageBatchOperationsRestTransport.GetLocation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.Location: - - r"""Call the get location method over HTTP. - - Args: - request (locations_pb2.GetLocationRequest): - The request object for GetLocation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.Location: Response from GetLocation method. 
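The properties above expose each RPC as a plain callable, so the transport can be driven directly, for example from a test. A sketch under the assumption that application default credentials are available in the environment; the job name is a placeholder:

from google.cloud.storagebatchoperations_v1.services.storage_batch_operations.transports.rest import (
    StorageBatchOperationsRestTransport,
)
from google.cloud.storagebatchoperations_v1.types import storage_batch_operations

transport = StorageBatchOperationsRestTransport()  # resolves credentials via google.auth.default()
job = transport.get_job(storage_batch_operations.GetJobRequest(
    name='projects/123456/locations/global/jobs/job01'))
print(job.state)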
- """ - - http_options = _BaseStorageBatchOperationsRestTransport._BaseGetLocation._get_http_options() - - request, metadata = self._interceptor.pre_get_location(request, metadata) - transcoded_request = _BaseStorageBatchOperationsRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseStorageBatchOperationsRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.GetLocation", - extra = { - "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "rpcName": "GetLocation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = StorageBatchOperationsRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.Location() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_location(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient.GetLocation", - extra = { - "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "rpcName": "GetLocation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - - class _ListLocations(_BaseStorageBatchOperationsRestTransport._BaseListLocations, StorageBatchOperationsRestStub): - def __hash__(self): - return hash("StorageBatchOperationsRestTransport.ListLocations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.ListLocationsResponse: - - r"""Call the list 
locations method over HTTP. - - Args: - request (locations_pb2.ListLocationsRequest): - The request object for ListLocations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.ListLocationsResponse: Response from ListLocations method. - """ - - http_options = _BaseStorageBatchOperationsRestTransport._BaseListLocations._get_http_options() - - request, metadata = self._interceptor.pre_list_locations(request, metadata) - transcoded_request = _BaseStorageBatchOperationsRestTransport._BaseListLocations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseStorageBatchOperationsRestTransport._BaseListLocations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.ListLocations", - extra = { - "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "rpcName": "ListLocations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = StorageBatchOperationsRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.ListLocationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_locations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient.ListLocations", - extra = { - "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "rpcName": "ListLocations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(_BaseStorageBatchOperationsRestTransport._BaseCancelOperation, StorageBatchOperationsRestStub): - def __hash__(self): - return hash("StorageBatchOperationsRestTransport.CancelOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
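Unlike the job RPCs, the location and operation mixins parse straight into raw ``*_pb2`` messages rather than proto-plus wrappers, as in the ListLocations handler above. The same decode-then-parse step, sketched with an invented payload:

from google.cloud.location import locations_pb2
from google.protobuf import json_format

content = '{"name": "projects/123456/locations/us-central1", "locationId": "us-central1"}'
location = json_format.Parse(content, locations_pb2.Location())
assert location.location_id == 'us-central1'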
- """ - - http_options = _BaseStorageBatchOperationsRestTransport._BaseCancelOperation._get_http_options() - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseStorageBatchOperationsRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) - - body = _BaseStorageBatchOperationsRestTransport._BaseCancelOperation._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseStorageBatchOperationsRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.CancelOperation", - extra = { - "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "rpcName": "CancelOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = StorageBatchOperationsRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(_BaseStorageBatchOperationsRestTransport._BaseDeleteOperation, StorageBatchOperationsRestStub): - def __hash__(self): - return hash("StorageBatchOperationsRestTransport.DeleteOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseStorageBatchOperationsRestTransport._BaseDeleteOperation._get_http_options() - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseStorageBatchOperationsRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseStorageBatchOperationsRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.DeleteOperation", - extra = { - "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "rpcName": "DeleteOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = StorageBatchOperationsRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(_BaseStorageBatchOperationsRestTransport._BaseGetOperation, StorageBatchOperationsRestStub): - def __hash__(self): - return hash("StorageBatchOperationsRestTransport.GetOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.Operation: Response from GetOperation method. 
- """ - - http_options = _BaseStorageBatchOperationsRestTransport._BaseGetOperation._get_http_options() - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseStorageBatchOperationsRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseStorageBatchOperationsRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.GetOperation", - extra = { - "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "rpcName": "GetOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = StorageBatchOperationsRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.Operation() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient.GetOperation", - extra = { - "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "rpcName": "GetOperation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(_BaseStorageBatchOperationsRestTransport._BaseListOperations, StorageBatchOperationsRestStub): - def __hash__(self): - return hash("StorageBatchOperationsRestTransport.ListOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - - 
r"""Call the list operations method over HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. - """ - - http_options = _BaseStorageBatchOperationsRestTransport._BaseListOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseStorageBatchOperationsRestTransport._BaseListOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseStorageBatchOperationsRestTransport._BaseListOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.ListOperations", - extra = { - "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "rpcName": "ListOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = StorageBatchOperationsRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient.ListOperations", - extra = { - "serviceName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "rpcName": "ListOperations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'StorageBatchOperationsRestTransport', -) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/rest_base.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/rest_base.py deleted file mode 100644 index 733154701bbd..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/rest_base.py +++ /dev/null @@ -1,455 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.cloud.location import locations_pb2 # type: ignore -from .base import StorageBatchOperationsTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.storagebatchoperations_v1.types import storage_batch_operations -from google.cloud.storagebatchoperations_v1.types import storage_batch_operations_types -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseStorageBatchOperationsRestTransport(StorageBatchOperationsTransport): - """Base REST backend transport for StorageBatchOperations. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'storagebatchoperations.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'storagebatchoperations.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseCancelJob: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/jobs/*}:cancel', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = storage_batch_operations.CancelJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseStorageBatchOperationsRestTransport._BaseCancelJob._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateJob: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "jobId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in 
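The ``__REQUIRED_FIELDS_DEFAULT_VALUES`` maps above back-fill required query-string fields that the request left unset; for CreateJob, ``jobId`` defaults to the empty string so the parameter always reaches the server, and every method pins ``$alt`` to JSON with integer enum encoding. The merge, sketched with an invented query dict:

required_defaults = {'jobId': ''}
query_params = {'requestId': '123e4567-e89b-12d3-a456-426614174000'}  # hypothetical
query_params.update({k: v for k, v in required_defaults.items() if k not in query_params})
query_params['$alt'] = 'json;enum-encoding=int'
# query_params now carries requestId, jobId='' and the $alt system parameter.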
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/jobs', - 'body': 'job', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = storage_batch_operations.CreateJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseStorageBatchOperationsRestTransport._BaseCreateJob._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteJob: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/jobs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = storage_batch_operations.DeleteJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseStorageBatchOperationsRestTransport._BaseDeleteJob._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetJob: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/jobs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = storage_batch_operations.GetJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseStorageBatchOperationsRestTransport._BaseGetJob._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListJobs: - def __hash__(self): # pragma: NO COVER - return 
NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/jobs', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = storage_batch_operations.ListJobsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseStorageBatchOperationsRestTransport._BaseListJobs._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetLocation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListLocations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/locations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseCancelOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseDeleteOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") 
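``path_template.transcode`` does the gRPC-transcoding work in these base classes: it matches the request against each HTTP rule's URI template and splits it into ``uri``, ``method``, and residual ``query_params``. A sketch using the GetLocation rule above, in the keyword form the mixin bases use:

from google.api_core import path_template

http_options = [{'method': 'get', 'uri': '/v1/{name=projects/*/locations/*}'}]
transcoded = path_template.transcode(http_options, name='projects/123456/locations/us-central1')
assert transcoded['uri'] == '/v1/projects/123456/locations/us-central1'
assert transcoded['method'] == 'get'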
- - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseGetOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - -__all__=( - '_BaseStorageBatchOperationsRestTransport', -) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/types/__init__.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/types/__init__.py deleted file mode 100644 index ebd4cde2da8d..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/types/__init__.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .storage_batch_operations import ( - CancelJobRequest, - CancelJobResponse, - CreateJobRequest, - DeleteJobRequest, - GetJobRequest, - ListJobsRequest, - ListJobsResponse, - OperationMetadata, -) -from .storage_batch_operations_types import ( - BucketList, - Counters, - DeleteObject, - ErrorLogEntry, - ErrorSummary, - Job, - LoggingConfig, - Manifest, - PrefixList, - PutMetadata, - PutObjectHold, - RewriteObject, -) - -__all__ = ( - 'CancelJobRequest', - 'CancelJobResponse', - 'CreateJobRequest', - 'DeleteJobRequest', - 'GetJobRequest', - 'ListJobsRequest', - 'ListJobsResponse', - 'OperationMetadata', - 'BucketList', - 'Counters', - 'DeleteObject', - 'ErrorLogEntry', - 'ErrorSummary', - 'Job', - 'LoggingConfig', - 'Manifest', - 'PrefixList', - 'PutMetadata', - 'PutObjectHold', - 'RewriteObject', -) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/types/storage_batch_operations.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/types/storage_batch_operations.py deleted file mode 100644 index 2f2ad62038a6..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/types/storage_batch_operations.py +++ /dev/null @@ -1,288 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.storagebatchoperations_v1.types import storage_batch_operations_types -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.storagebatchoperations.v1', - manifest={ - 'ListJobsRequest', - 'ListJobsResponse', - 'GetJobRequest', - 'CreateJobRequest', - 'CancelJobRequest', - 'DeleteJobRequest', - 'CancelJobResponse', - 'OperationMetadata', - }, -) - - -class ListJobsRequest(proto.Message): - r"""Message for request to list Jobs - - Attributes: - parent (str): - Required. Format: - projects/{project_id}/locations/{location_id}. - filter (str): - Optional. Filters results as defined by - https://google.aip.dev/160. - page_size (int): - Optional. The list page size. The default - page size is 100. - page_token (str): - Optional. The list page token. - order_by (str): - Optional. Field to sort by. Supported fields are name, - create_time. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListJobsResponse(proto.Message): - r"""Message for response to listing Jobs - - Attributes: - jobs (MutableSequence[google.cloud.storagebatchoperations_v1.types.Job]): - A list of storage batch jobs.
- next_page_token (str): - A token identifying a page of results. - unreachable (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - jobs: MutableSequence[storage_batch_operations_types.Job] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage_batch_operations_types.Job, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class GetJobRequest(proto.Message): - r"""Message for getting a Job - - Attributes: - name (str): - Required. ``name`` of the job to retrieve. Format: - projects/{project_id}/locations/{location_id}/jobs/{job_id}. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateJobRequest(proto.Message): - r"""Message for creating a Job - - Attributes: - parent (str): - Required. Value for parent. - job_id (str): - Required. The optional ``job_id`` for this Job. If not - specified, an id is generated. ``job_id`` should be no more - than 128 characters and must include only characters - available in DNS names, as defined by RFC-1123. - job (google.cloud.storagebatchoperations_v1.types.Job): - Required. The resource being created. - request_id (str): - Optional. An optional request ID to identify requests. - Specify a unique request ID in case you need to retry your - request. Requests with the same ``request_id`` will be ignored - for at least 60 minutes since the first request. The request ID - must be a valid UUID with the exception that the zero UUID is - not supported (00000000-0000-0000-0000-000000000000). - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - job_id: str = proto.Field( - proto.STRING, - number=2, - ) - job: storage_batch_operations_types.Job = proto.Field( - proto.MESSAGE, - number=3, - message=storage_batch_operations_types.Job, - ) - request_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class CancelJobRequest(proto.Message): - r"""Message for canceling a Job - - Attributes: - name (str): - Required. The ``name`` of the job to cancel. Format: - projects/{project_id}/locations/{location_id}/jobs/{job_id}. - request_id (str): - Optional. An optional request ID to identify requests. - Specify a unique request ID in case you need to retry your - request. Requests with the same ``request_id`` will be ignored - for at least 60 minutes since the first request. The request ID - must be a valid UUID with the exception that the zero UUID is - not supported (00000000-0000-0000-0000-000000000000). - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - request_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class DeleteJobRequest(proto.Message): - r"""Message for deleting a Job - - Attributes: - name (str): - Required. The ``name`` of the job to delete. Format: - projects/{project_id}/locations/{location_id}/jobs/{job_id}. - request_id (str): - Optional. An optional request ID to identify requests. - Specify a unique request ID in case you need to retry your - request. Requests with the same ``request_id`` will be ignored - for at least 60 minutes since the first request. The request ID - must be a valid UUID with the exception that the zero UUID is - not supported (00000000-0000-0000-0000-000000000000).
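Because requests carrying the same ``request_id`` are deduplicated for at least 60 minutes, a retry should resend the identical ID rather than mint a new one. A sketch of building an idempotent CreateJob request; the parent, job id, and empty job body are placeholders:

import uuid

from google.cloud.storagebatchoperations_v1.types import storage_batch_operations, storage_batch_operations_types

request = storage_batch_operations.CreateJobRequest(
    parent='projects/123456/locations/global',
    job_id='job01',
    job=storage_batch_operations_types.Job(),
    request_id=str(uuid.uuid4()),  # reuse this exact value when retrying
)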
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - request_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CancelJobResponse(proto.Message): - r"""Message for response to cancel Job. - """ - - -class OperationMetadata(proto.Message): - r"""Represents the metadata of the long-running operation. - - Attributes: - operation (str): - Output only. The unique operation resource - name. Format: - projects/{project}/locations/{location}/operations/{operation}. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation was - created. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation finished - running. - requested_cancellation (bool): - Output only. Identifies whether the user has requested - cancellation of the operation. Operations that have been - cancelled successfully have - [google.longrunning.Operation.error][google.longrunning.Operation.error] - value with a - [google.rpc.Status.code][google.rpc.Status.code] of 1, - corresponding to ``Code.CANCELLED``. - api_version (str): - Output only. API version used to start the - operation. - job (google.cloud.storagebatchoperations_v1.types.Job): - Output only. The Job associated with the - operation. - """ - - operation: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - requested_cancellation: bool = proto.Field( - proto.BOOL, - number=7, - ) - api_version: str = proto.Field( - proto.STRING, - number=8, - ) - job: storage_batch_operations_types.Job = proto.Field( - proto.MESSAGE, - number=10, - message=storage_batch_operations_types.Job, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/types/storage_batch_operations_types.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/types/storage_batch_operations_types.py deleted file mode 100644 index 30b47c85a51f..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/google/cloud/storagebatchoperations_v1/types/storage_batch_operations_types.py +++ /dev/null @@ -1,653 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-from __future__ import annotations
-
-from typing import MutableMapping, MutableSequence
-
-import proto  # type: ignore
-
-from google.protobuf import timestamp_pb2  # type: ignore
-from google.rpc import code_pb2  # type: ignore
-
-
-__protobuf__ = proto.module(
-    package='google.cloud.storagebatchoperations.v1',
-    manifest={
-        'Job',
-        'BucketList',
-        'Manifest',
-        'PrefixList',
-        'PutObjectHold',
-        'DeleteObject',
-        'RewriteObject',
-        'PutMetadata',
-        'ErrorSummary',
-        'ErrorLogEntry',
-        'Counters',
-        'LoggingConfig',
-    },
-)
-
-
-class Job(proto.Message):
-    r"""The Storage Batch Operations Job description.
-
-    This message has `oneof`_ fields (mutually exclusive fields).
-    For each oneof, at most one member field can be set at the same time.
-    Setting any member of the oneof automatically clears all other
-    members.
-
-    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
-    Attributes:
-        name (str):
-            Identifier. The resource name of the Job. ``job_id`` is
-            unique within the project; it is either set by the
-            customer or defined by the service. Format:
-            projects/{project}/locations/global/jobs/{job_id}. For
-            example: "projects/123456/locations/global/jobs/job01".
-        description (str):
-            Optional. A description provided by the user
-            for the job. Its max length is 1024 bytes when
-            Unicode-encoded.
-        bucket_list (google.cloud.storagebatchoperations_v1.types.BucketList):
-            Specifies a list of buckets and their objects
-            to be transformed.
-
-            This field is a member of `oneof`_ ``source``.
-        put_object_hold (google.cloud.storagebatchoperations_v1.types.PutObjectHold):
-            Changes object hold status.
-
-            This field is a member of `oneof`_ ``transformation``.
-        delete_object (google.cloud.storagebatchoperations_v1.types.DeleteObject):
-            Delete objects.
-
-            This field is a member of `oneof`_ ``transformation``.
-        put_metadata (google.cloud.storagebatchoperations_v1.types.PutMetadata):
-            Updates object metadata. Allows updating
-            custom metadata and fixed-key metadata, i.e.
-            Cache-Control, Content-Disposition,
-            Content-Encoding, Content-Language,
-            Content-Type, and Custom-Time.
-
-            This field is a member of `oneof`_ ``transformation``.
-        rewrite_object (google.cloud.storagebatchoperations_v1.types.RewriteObject):
-            Rewrites the object and updates metadata
-            such as the KMS key.
-
-            This field is a member of `oneof`_ ``transformation``.
-        logging_config (google.cloud.storagebatchoperations_v1.types.LoggingConfig):
-            Optional. Logging configuration.
-        create_time (google.protobuf.timestamp_pb2.Timestamp):
-            Output only. The time that the job was
-            created.
-        schedule_time (google.protobuf.timestamp_pb2.Timestamp):
-            Output only. The time that the job was
-            scheduled.
-        complete_time (google.protobuf.timestamp_pb2.Timestamp):
-            Output only. The time that the job was
-            completed.
-        counters (google.cloud.storagebatchoperations_v1.types.Counters):
-            Output only. Information about the progress
-            of the job.
-        error_summaries (MutableSequence[google.cloud.storagebatchoperations_v1.types.ErrorSummary]):
-            Output only. Summarizes errors encountered
-            with sample error log entries.
-        state (google.cloud.storagebatchoperations_v1.types.Job.State):
-            Output only. State of the job.
-    """
-    class State(proto.Enum):
-        r"""Describes state of a job.
-
-        Values:
-            STATE_UNSPECIFIED (0):
-                Default value. This value is unused.
-            RUNNING (1):
-                In progress.
-            SUCCEEDED (2):
-                Completed successfully.
-            CANCELED (3):
-                Cancelled by the user.
- FAILED (4): - Terminated due to an unrecoverable failure. - """ - STATE_UNSPECIFIED = 0 - RUNNING = 1 - SUCCEEDED = 2 - CANCELED = 3 - FAILED = 4 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - description: str = proto.Field( - proto.STRING, - number=2, - ) - bucket_list: 'BucketList' = proto.Field( - proto.MESSAGE, - number=19, - oneof='source', - message='BucketList', - ) - put_object_hold: 'PutObjectHold' = proto.Field( - proto.MESSAGE, - number=5, - oneof='transformation', - message='PutObjectHold', - ) - delete_object: 'DeleteObject' = proto.Field( - proto.MESSAGE, - number=6, - oneof='transformation', - message='DeleteObject', - ) - put_metadata: 'PutMetadata' = proto.Field( - proto.MESSAGE, - number=8, - oneof='transformation', - message='PutMetadata', - ) - rewrite_object: 'RewriteObject' = proto.Field( - proto.MESSAGE, - number=20, - oneof='transformation', - message='RewriteObject', - ) - logging_config: 'LoggingConfig' = proto.Field( - proto.MESSAGE, - number=9, - message='LoggingConfig', - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=10, - message=timestamp_pb2.Timestamp, - ) - schedule_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=11, - message=timestamp_pb2.Timestamp, - ) - complete_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=12, - message=timestamp_pb2.Timestamp, - ) - counters: 'Counters' = proto.Field( - proto.MESSAGE, - number=13, - message='Counters', - ) - error_summaries: MutableSequence['ErrorSummary'] = proto.RepeatedField( - proto.MESSAGE, - number=14, - message='ErrorSummary', - ) - state: State = proto.Field( - proto.ENUM, - number=15, - enum=State, - ) - - -class BucketList(proto.Message): - r"""Describes list of buckets and their objects to be - transformed. - - Attributes: - buckets (MutableSequence[google.cloud.storagebatchoperations_v1.types.BucketList.Bucket]): - Required. List of buckets and their objects - to be transformed. Currently, only one bucket - configuration is supported. If multiple buckets - are specified, an error will be returned. - """ - - class Bucket(proto.Message): - r"""Describes configuration of a single bucket and its objects to - be transformed. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - bucket (str): - Required. Bucket name for the objects to be - transformed. - prefix_list (google.cloud.storagebatchoperations_v1.types.PrefixList): - Specifies objects matching a prefix set. - - This field is a member of `oneof`_ ``object_configuration``. - manifest (google.cloud.storagebatchoperations_v1.types.Manifest): - Specifies objects in a manifest file. - - This field is a member of `oneof`_ ``object_configuration``. - """ - - bucket: str = proto.Field( - proto.STRING, - number=1, - ) - prefix_list: 'PrefixList' = proto.Field( - proto.MESSAGE, - number=2, - oneof='object_configuration', - message='PrefixList', - ) - manifest: 'Manifest' = proto.Field( - proto.MESSAGE, - number=3, - oneof='object_configuration', - message='Manifest', - ) - - buckets: MutableSequence[Bucket] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=Bucket, - ) - - -class Manifest(proto.Message): - r"""Describes list of objects to be transformed. 
-
-    Attributes:
-        manifest_location (str):
-            Required. ``manifest_location`` must contain the manifest
-            source file that is a CSV file in a Google Cloud Storage
-            bucket. Each row in the file must include the object
-            details, i.e. BucketId and Name. Generation may optionally
-            be specified. When it is not specified, the live object is
-            acted upon. ``manifest_location`` should either be
-
-            1) An absolute path to the object in the format of
-               gs://bucket_name/path/file_name.csv.
-            2) An absolute path with a single wildcard character in
-               the file name, for example
-               gs://bucket_name/path/file_name*.csv. If the manifest
-               location is specified with a wildcard, objects in all
-               manifest files matching the pattern will be acted upon.
-    """
-
-    manifest_location: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-
-
-class PrefixList(proto.Message):
-    r"""Describes prefixes of objects to be transformed.
-
-    Attributes:
-        included_object_prefixes (MutableSequence[str]):
-            Optional. Include prefixes of the objects to be
-            transformed.
-
-            -  Supports full object name
-            -  Supports prefix of the object name
-            -  Wildcards are not supported
-            -  Supports empty string for all objects in a
-               bucket.
-    """
-
-    included_object_prefixes: MutableSequence[str] = proto.RepeatedField(
-        proto.STRING,
-        number=2,
-    )
-
-
-class PutObjectHold(proto.Message):
-    r"""Describes options to update object hold.
-
-    Attributes:
-        temporary_hold (google.cloud.storagebatchoperations_v1.types.PutObjectHold.HoldStatus):
-            Required. Updates the object's temporary hold
-            state. While a temporary hold is set, the
-            object cannot be deleted or replaced.
-        event_based_hold (google.cloud.storagebatchoperations_v1.types.PutObjectHold.HoldStatus):
-            Required. Updates the object's event-based
-            hold state. While an event-based hold is set,
-            the object cannot be deleted or replaced.
-            Resets the object's time in the bucket for the
-            purposes of the retention period.
-    """
-    class HoldStatus(proto.Enum):
-        r"""Describes the status of the hold.
-
-        Values:
-            HOLD_STATUS_UNSPECIFIED (0):
-                Default value. Object hold status will not
-                be changed.
-            SET (1):
-                Places the hold.
-            UNSET (2):
-                Releases the hold.
-        """
-        HOLD_STATUS_UNSPECIFIED = 0
-        SET = 1
-        UNSET = 2
-
-    temporary_hold: HoldStatus = proto.Field(
-        proto.ENUM,
-        number=1,
-        enum=HoldStatus,
-    )
-    event_based_hold: HoldStatus = proto.Field(
-        proto.ENUM,
-        number=2,
-        enum=HoldStatus,
-    )
-
-
-class DeleteObject(proto.Message):
-    r"""Describes options to delete an object.
-
-    Attributes:
-        permanent_object_deletion_enabled (bool):
-            Required. Controls deletion behavior when
-            versioning is enabled for the object's bucket.
-            If true, both live and noncurrent objects will
-            be permanently deleted. Otherwise, live objects
-            in versioned buckets will become noncurrent and
-            objects that were already noncurrent will be
-            skipped. This setting doesn't have any impact on
-            the Soft Delete feature. All objects deleted by
-            this service can be restored for the duration of
-            the Soft Delete retention period, if enabled. If
-            enabled and the manifest doesn't specify an
-            object's generation, a GetObjectMetadata call (a
-            Class B operation) will be made to determine the
-            live object generation.
-    """
-
-    permanent_object_deletion_enabled: bool = proto.Field(
-        proto.BOOL,
-        number=1,
-    )
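# A minimal sketch of the two object-selection modes documented above (a
# single-bucket prefix list vs. a manifest CSV); the bucket and object names
# are hypothetical placeholders:
from google.cloud import storagebatchoperations_v1

# Select by prefix: every object under "logs/2024/" in one bucket.
prefix_source = storagebatchoperations_v1.BucketList(
    buckets=[
        storagebatchoperations_v1.BucketList.Bucket(
            bucket="my-bucket",
            prefix_list=storagebatchoperations_v1.PrefixList(
                included_object_prefixes=["logs/2024/"],
            ),
        )
    ]
)

# Or select via a manifest CSV; a wildcard in the file name matches
# multiple manifest files, per the docstring above.
manifest_source = storagebatchoperations_v1.BucketList(
    buckets=[
        storagebatchoperations_v1.BucketList.Bucket(
            bucket="my-bucket",
            manifest=storagebatchoperations_v1.Manifest(
                manifest_location="gs://my-bucket/manifests/objects*.csv",
            ),
        )
    ]
)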
-
-
-class RewriteObject(proto.Message):
-    r"""Describes options for object rewrite.
-
-    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
-    Attributes:
-        kms_key (str):
-            Required. Resource name of the Cloud KMS key
-            that will be used to encrypt the object. The
-            Cloud KMS key must be located in the same
-            location as the object. Refer to
-            https://cloud.google.com/storage/docs/encryption/using-customer-managed-keys#add-object-key
-            for additional documentation. Format:
-            projects/{project}/locations/{location}/keyRings/{keyring}/cryptoKeys/{key}
-            For example:
-            "projects/123456/locations/us-central1/keyRings/my-keyring/cryptoKeys/my-key".
-            The object will be rewritten and set with the
-            specified KMS key.
-
-            This field is a member of `oneof`_ ``_kms_key``.
-    """
-
-    kms_key: str = proto.Field(
-        proto.STRING,
-        number=1,
-        optional=True,
-    )
-
-
-class PutMetadata(proto.Message):
-    r"""Describes options for object metadata update.
-
-    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
-    Attributes:
-        content_disposition (str):
-            Optional. Updates the objects' Content-Disposition
-            fixed metadata. Unset values will be ignored.
-            Set empty values to clear the metadata. Refer to
-            https://cloud.google.com/storage/docs/metadata#content-disposition
-            for additional documentation.
-
-            This field is a member of `oneof`_ ``_content_disposition``.
-        content_encoding (str):
-            Optional. Updates the objects' Content-Encoding
-            fixed metadata. Unset values will be ignored.
-            Set empty values to clear the metadata. Refer to
-            documentation in
-            https://cloud.google.com/storage/docs/metadata#content-encoding.
-
-            This field is a member of `oneof`_ ``_content_encoding``.
-        content_language (str):
-            Optional. Updates the objects' Content-Language
-            fixed metadata. Refer to ISO 639-1 language
-            codes for typical values of this metadata. Max
-            length 100 characters. Unset values will be
-            ignored. Set empty values to clear the metadata.
-            Refer to documentation in
-            https://cloud.google.com/storage/docs/metadata#content-language.
-
-            This field is a member of `oneof`_ ``_content_language``.
-        content_type (str):
-            Optional. Updates the objects' Content-Type fixed
-            metadata. Unset values will be ignored.
-            Set empty values to clear the metadata. Refer
-            to documentation in
-            https://cloud.google.com/storage/docs/metadata#content-type.
-
-            This field is a member of `oneof`_ ``_content_type``.
-        cache_control (str):
-            Optional. Updates the objects' Cache-Control fixed
-            metadata. Unset values will be ignored. Set empty values
-            to clear the metadata. Refer to documentation in
-            https://cloud.google.com/storage/docs/metadata#caching_data.
-
-            This field is a member of `oneof`_ ``_cache_control``.
-        custom_time (str):
-            Optional. Updates the objects' Custom-Time fixed
-            metadata. Unset values will be ignored. Set
-            empty values to clear the metadata. Additionally,
-            the value for Custom-Time cannot decrease. Refer
-            to documentation in
-            https://cloud.google.com/storage/docs/metadata#custom-time.
-
-            This field is a member of `oneof`_ ``_custom_time``.
-        custom_metadata (MutableMapping[str, str]):
-            Optional. Updates the objects' custom metadata.
-            Adds or sets individual custom metadata key
-            value pairs on objects. Keys that are set with
-            empty custom metadata values will have their
-            values cleared. Existing custom metadata not
-            specified with this flag is not changed. Refer
-            to documentation in
-            https://cloud.google.com/storage/docs/metadata#custom-metadata.
-    """
-
-    content_disposition: str = proto.Field(
-        proto.STRING,
-        number=1,
-        optional=True,
-    )
-    content_encoding: str = proto.Field(
-        proto.STRING,
-        number=2,
-        optional=True,
-    )
-    content_language: str = proto.Field(
-        proto.STRING,
-        number=3,
-        optional=True,
-    )
-    content_type: str = proto.Field(
-        proto.STRING,
-        number=4,
-        optional=True,
-    )
-    cache_control: str = proto.Field(
-        proto.STRING,
-        number=5,
-        optional=True,
-    )
-    custom_time: str = proto.Field(
-        proto.STRING,
-        number=6,
-        optional=True,
-    )
-    custom_metadata: MutableMapping[str, str] = proto.MapField(
-        proto.STRING,
-        proto.STRING,
-        number=7,
-    )
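# A minimal sketch of the unset/empty-value semantics documented above; the
# concrete metadata values are illustrative only:
from google.cloud import storagebatchoperations_v1

put_metadata = storagebatchoperations_v1.PutMetadata(
    content_type="text/csv",  # sets Content-Type on matched objects
    cache_control="",         # an empty string clears Cache-Control
    custom_metadata={"team": "data-eng", "stale-key": ""},  # "" clears that key
)
# Fields left unset here (content_language, custom_time, ...) are ignored,
# leaving the existing object metadata unchanged.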
-
-
-class ErrorSummary(proto.Message):
-    r"""A summary of errors by error code, plus a count and sample
-    error log entries.
-
-    Attributes:
-        error_code (google.rpc.code_pb2.Code):
-            Required. The canonical error code.
-        error_count (int):
-            Required. Number of errors encountered per ``error_code``.
-        error_log_entries (MutableSequence[google.cloud.storagebatchoperations_v1.types.ErrorLogEntry]):
-            Required. Sample error logs.
-    """
-
-    error_code: code_pb2.Code = proto.Field(
-        proto.ENUM,
-        number=1,
-        enum=code_pb2.Code,
-    )
-    error_count: int = proto.Field(
-        proto.INT64,
-        number=2,
-    )
-    error_log_entries: MutableSequence['ErrorLogEntry'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=3,
-        message='ErrorLogEntry',
-    )
-
-
-class ErrorLogEntry(proto.Message):
-    r"""An entry describing an error that has occurred.
-
-    Attributes:
-        object_uri (str):
-            Required. Output only. Object URL, e.g.
-            gs://my_bucket/object.txt.
-        error_details (MutableSequence[str]):
-            Optional. Output only. At most 5 error log
-            entries are recorded for a given error code for
-            a job.
-    """
-
-    object_uri: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    error_details: MutableSequence[str] = proto.RepeatedField(
-        proto.STRING,
-        number=3,
-    )
-
-
-class Counters(proto.Message):
-    r"""Describes details about the progress of the job.
-
-    Attributes:
-        total_object_count (int):
-            Output only. Number of objects listed.
-        succeeded_object_count (int):
-            Output only. Number of objects completed.
-        failed_object_count (int):
-            Output only. Number of objects failed.
-    """
-
-    total_object_count: int = proto.Field(
-        proto.INT64,
-        number=1,
-    )
-    succeeded_object_count: int = proto.Field(
-        proto.INT64,
-        number=2,
-    )
-    failed_object_count: int = proto.Field(
-        proto.INT64,
-        number=3,
-    )
-
-
-class LoggingConfig(proto.Message):
-    r"""Specifies the Cloud Logging behavior.
-
-    Attributes:
-        log_actions (MutableSequence[google.cloud.storagebatchoperations_v1.types.LoggingConfig.LoggableAction]):
-            Required. Specifies the actions to be logged.
-        log_action_states (MutableSequence[google.cloud.storagebatchoperations_v1.types.LoggingConfig.LoggableActionState]):
-            Required. States in which actions are logged.
-            If empty, no logs are generated.
-    """
-    class LoggableAction(proto.Enum):
-        r"""Loggable action types.
-
-        Values:
-            LOGGABLE_ACTION_UNSPECIFIED (0):
-                Illegal value, to avoid allowing a default.
-            TRANSFORM (6):
-                The corresponding transform action in this
-                job.
-        """
-        LOGGABLE_ACTION_UNSPECIFIED = 0
-        TRANSFORM = 6
-
-    class LoggableActionState(proto.Enum):
-        r"""Loggable action states filter.
-
-        Values:
-            LOGGABLE_ACTION_STATE_UNSPECIFIED (0):
-                Illegal value, to avoid allowing a default.
-            SUCCEEDED (1):
-                ``LoggableAction`` completed successfully. ``SUCCEEDED``
-                actions are logged as
-                [INFO][google.logging.type.LogSeverity.INFO].
-            FAILED (2):
-                ``LoggableAction`` terminated in an error state. ``FAILED``
-                actions are logged as
-                [ERROR][google.logging.type.LogSeverity.ERROR].
-        """
-        LOGGABLE_ACTION_STATE_UNSPECIFIED = 0
-        SUCCEEDED = 1
-        FAILED = 2
-
-    log_actions: MutableSequence[LoggableAction] = proto.RepeatedField(
-        proto.ENUM,
-        number=1,
-        enum=LoggableAction,
-    )
-    log_action_states: MutableSequence[LoggableActionState] = proto.RepeatedField(
-        proto.ENUM,
-        number=2,
-        enum=LoggableActionState,
-    )
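# A small sketch wiring the enums above into a config that logs only failed
# transform actions; the type names come from this module:
from google.cloud import storagebatchoperations_v1

logging_config = storagebatchoperations_v1.LoggingConfig(
    log_actions=[
        storagebatchoperations_v1.LoggingConfig.LoggableAction.TRANSFORM,
    ],
    log_action_states=[
        storagebatchoperations_v1.LoggingConfig.LoggableActionState.FAILED,
    ],
)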
-
-
-__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/mypy.ini b/owl-bot-staging/google-cloud-storagebatchoperations/v1/mypy.ini
deleted file mode 100644
index 574c5aed394b..000000000000
--- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/mypy.ini
+++ /dev/null
@@ -1,3 +0,0 @@
-[mypy]
-python_version = 3.7
-namespace_packages = True
diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/noxfile.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/noxfile.py
deleted file mode 100644
index 69439bb62866..000000000000
--- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/noxfile.py
+++ /dev/null
@@ -1,591 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# -import os -import pathlib -import re -import shutil - -from typing import Dict, List -import warnings - -import nox - -BLACK_VERSION = "black[jupyter]==23.7.0" -ISORT_VERSION = "isort==5.11.0" - -LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", -] - -DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = "google-cloud-storagebatchoperations" - -UNIT_TEST_STANDARD_DEPENDENCIES = [ - "mock", - "asyncmock", - "pytest", - "pytest-cov", - "pytest-asyncio", -] -UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] -UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] -UNIT_TEST_DEPENDENCIES: List[str] = [] -UNIT_TEST_EXTRAS: List[str] = [] -UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} - -SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] -SYSTEM_TEST_STANDARD_DEPENDENCIES = [ - "mock", - "pytest", - "google-cloud-testutils", -] -SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_EXTRAS: List[str] = [] -SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} - -nox.options.sessions = [ - "unit", - "system", - "cover", - "lint", - "lint_setup_py", - "blacken", - "docs", -] - -# Error if a python version is missing -nox.options.error_on_missing_interpreters = True - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - "mypy", - "types-requests", - "types-protobuf", - ) - session.install(".") - session.run( - "mypy", - "-p", - "google", - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install("google-cloud-testutils") - session.install(".") - - session.run( - "lower-bound-checker", - "update", - "--package-name", - PACKAGE_NAME, - "--constraints-file", - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install("google-cloud-testutils") - session.install(".") - - session.run( - "lower-bound-checker", - "check", - "--package-name", - PACKAGE_NAME, - "--constraints-file", - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *LINT_PATHS, - ) - - session.run("flake8", "google", "tests") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *LINT_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def format(session): - """ - Run isort to sort imports. Then run black - to format code to uniform standard. - """ - session.install(BLACK_VERSION, ISORT_VERSION) - # Use the --fss option to sort imports using strict alphabetical order. 
- # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections - session.run( - "isort", - "--fss", - *LINT_PATHS, - ) - session.run( - "black", - *LINT_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("setuptools", "docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") - - -def install_unittest_dependencies(session, *constraints): - standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES - session.install(*standard_deps, *constraints) - - if UNIT_TEST_EXTERNAL_DEPENDENCIES: - warnings.warn( - "'unit_test_external_dependencies' is deprecated. Instead, please " - "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", - DeprecationWarning, - ) - session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) - - if UNIT_TEST_LOCAL_DEPENDENCIES: - session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) - - if UNIT_TEST_EXTRAS_BY_PYTHON: - extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) - elif UNIT_TEST_EXTRAS: - extras = UNIT_TEST_EXTRAS - else: - extras = [] - - if extras: - session.install("-e", f".[{','.join(extras)}]", *constraints) - else: - session.install("-e", ".", *constraints) - - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - ["python", "upb", "cpp"], -) -def unit(session, protobuf_implementation): - # Install all test dependencies, then install this package in-place. - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) - install_unittest_dependencies(session, "-c", constraints_path) - - # TODO(https://github.com/googleapis/synthtool/issues/1976): - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. - if protobuf_implementation == "cpp": - session.install("protobuf<4") - - # Run py.test against the unit tests. 
- session.run( - "py.test", - "--quiet", - f"--junitxml=unit_{session.python}_sponge_log.xml", - "--cov=google", - "--cov=tests/unit", - "--cov-append", - "--cov-config=.coveragerc", - "--cov-report=", - "--cov-fail-under=0", - os.path.join("tests", "unit"), - *session.posargs, - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -def install_systemtest_dependencies(session, *constraints): - session.install("--pre", "grpcio") - - session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: - session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_LOCAL_DEPENDENCIES: - session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_DEPENDENCIES: - session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_EXTRAS_BY_PYTHON: - extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) - elif SYSTEM_TEST_EXTRAS: - extras = SYSTEM_TEST_EXTRAS - else: - extras = [] - - if extras: - session.install("-e", f".[{','.join(extras)}]", *constraints) - else: - session.install("-e", ".", *constraints) - - -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -def system(session): - """Run the system test suite.""" - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - - # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. - if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": - session.skip("RUN_SYSTEM_TESTS is set to false, skipping") - # Install pyopenssl for mTLS testing. - if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": - session.install("pyopenssl") - - system_test_exists = os.path.exists(system_test_path) - system_test_folder_exists = os.path.exists(system_test_folder_path) - # Sanity check: only run tests if found. - if not system_test_exists and not system_test_folder_exists: - session.skip("System tests were not found") - - install_systemtest_dependencies(session, "-c", constraints_path) - - # Run py.test against the system tests. - if system_test_exists: - session.run( - "py.test", - "--quiet", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_path, - *session.posargs, - ) - if system_test_folder_exists: - session.run( - "py.test", - "--quiet", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_folder_path, - *session.posargs, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python="3.10") -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install( - # We need to pin to specific versions of the `sphinxcontrib-*` packages - # which still support sphinx 4.x. - # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 - # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
- "sphinxcontrib-applehelp==1.0.4", - "sphinxcontrib-devhelp==1.0.2", - "sphinxcontrib-htmlhelp==2.0.1", - "sphinxcontrib-qthelp==1.0.3", - "sphinxcontrib-serializinghtml==1.1.5", - "sphinx==4.5.0", - "alabaster", - "recommonmark", - ) - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python="3.10") -def docfx(session): - """Build the docfx yaml files for this library.""" - - session.install("-e", ".") - session.install( - # We need to pin to specific versions of the `sphinxcontrib-*` packages - # which still support sphinx 4.x. - # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 - # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. - "sphinxcontrib-applehelp==1.0.4", - "sphinxcontrib-devhelp==1.0.2", - "sphinxcontrib-htmlhelp==2.0.1", - "sphinxcontrib-qthelp==1.0.3", - "sphinxcontrib-serializinghtml==1.1.5", - "gcp-sphinx-docfx-yaml", - "alabaster", - "recommonmark", - ) - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-T", # show full traceback on exception - "-N", # no colors - "-D", - ( - "extensions=sphinx.ext.autodoc," - "sphinx.ext.autosummary," - "docfx_yaml.extension," - "sphinx.ext.intersphinx," - "sphinx.ext.coverage," - "sphinx.ext.napoleon," - "sphinx.ext.todo," - "sphinx.ext.viewcode," - "recommonmark" - ), - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -@nox.parametrize( - "protobuf_implementation", - ["python", "upb", "cpp"], -) -def prerelease_deps(session, protobuf_implementation): - """ - Run all tests with pre-release versions of dependencies installed - rather than the standard non pre-release versions. - Pre-release versions can be installed using - `pip install --pre `. - """ - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install all dependencies - session.install("-e", ".") - - # Install dependencies for the unit test environment - unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES - session.install(*unit_deps_all) - - # Install dependencies for the system test environment - system_deps_all = ( - SYSTEM_TEST_STANDARD_DEPENDENCIES - + SYSTEM_TEST_EXTERNAL_DEPENDENCIES - + SYSTEM_TEST_EXTRAS - ) - session.install(*system_deps_all) - - # Because we test minimum dependency versions on the minimum Python - # version, the first version we test with in the unit tests sessions has a - # constraints file containing all dependencies and extras. - with open( - CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - # Install dependencies specified in `testing/constraints-X.txt`. 
- session.install(*constraints_deps) - - # Note: If a dependency is added to the `prerel_deps` list, - # the `core_dependencies_from_source` list in the `core_deps_from_source` - # nox session should also be updated. - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - "grpc-google-iam-v1", - "grpcio", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--ignore-installed", dep) - # TODO(https://github.com/grpc/grpc/issues/38965): Add `grpcio-status`` - # to the dictionary below once this bug is fixed. - # TODO(https://github.com/googleapis/google-cloud-python/issues/13643): Add - # `googleapis-common-protos` and `grpc-google-iam-v1` to the dictionary below - # once this bug is fixed. - package_namespaces = { - "google-api-core": "google.api_core", - "google-auth": "google.auth", - "grpcio": "grpc", - "protobuf": "google.protobuf", - "proto-plus": "proto", - } - - version_namespace = package_namespaces.get(dep) - - print(f"Installed {dep}") - if version_namespace: - session.run( - "python", - "-c", - f"import {version_namespace}; print({version_namespace}.__version__)", - ) - - session.run( - "py.test", - "tests/unit", - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -@nox.parametrize( - "protobuf_implementation", - ["python", "upb"], -) -def core_deps_from_source(session, protobuf_implementation): - """Run all tests with core dependencies installed from source - rather than pulling the dependencies from PyPI. - """ - - # Install all dependencies - session.install("-e", ".") - - # Install dependencies for the unit test environment - unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES - session.install(*unit_deps_all) - - # Install dependencies for the system test environment - system_deps_all = ( - SYSTEM_TEST_STANDARD_DEPENDENCIES - + SYSTEM_TEST_EXTERNAL_DEPENDENCIES - + SYSTEM_TEST_EXTRAS - ) - session.install(*system_deps_all) - - # Because we test minimum dependency versions on the minimum Python - # version, the first version we test with in the unit tests sessions has a - # constraints file containing all dependencies and extras. - with open( - CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - # Install dependencies specified in `testing/constraints-X.txt`. - session.install(*constraints_deps) - - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2358): `grpcio` and - # `grpcio-status` should be added to the list below so that they are installed from source, - # rather than PyPI. - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2357): `protobuf` should be - # added to the list below so that it is installed from source, rather than PyPI - # Note: If a dependency is added to the `core_dependencies_from_source` list, - # the `prerel_deps` list in the `prerelease_deps` nox session should also be updated. 
- core_dependencies_from_source = [ - "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos", - "google-api-core @ git+https://github.com/googleapis/python-api-core.git", - "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", - "grpc-google-iam-v1 @ git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1", - "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", - ] - - for dep in core_dependencies_from_source: - session.install(dep, "--no-deps", "--ignore-installed") - print(f"Installed {dep}") - - session.run( - "py.test", - "tests/unit", - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/snippet_metadata_google.cloud.storagebatchoperations.v1.json b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/snippet_metadata_google.cloud.storagebatchoperations.v1.json deleted file mode 100644 index bcbed4715bdb..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/snippet_metadata_google.cloud.storagebatchoperations.v1.json +++ /dev/null @@ -1,830 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.storagebatchoperations.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-storagebatchoperations", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient", - "shortName": "StorageBatchOperationsAsyncClient" - }, - "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient.cancel_job", - "method": { - "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations.CancelJob", - "service": { - "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "shortName": "StorageBatchOperations" - }, - "shortName": "CancelJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.storagebatchoperations_v1.types.CancelJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.storagebatchoperations_v1.types.CancelJobResponse", - "shortName": "cancel_job" - }, - "description": "Sample for CancelJob", - "file": "storagebatchoperations_v1_generated_storage_batch_operations_cancel_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "storagebatchoperations_v1_generated_StorageBatchOperations_CancelJob_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "storagebatchoperations_v1_generated_storage_batch_operations_cancel_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient", - "shortName": "StorageBatchOperationsClient" - }, - "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.cancel_job", - "method": { - "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations.CancelJob", - "service": { - "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "shortName": "StorageBatchOperations" - }, - "shortName": "CancelJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.storagebatchoperations_v1.types.CancelJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.storagebatchoperations_v1.types.CancelJobResponse", - "shortName": "cancel_job" - }, - "description": "Sample for CancelJob", - "file": "storagebatchoperations_v1_generated_storage_batch_operations_cancel_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "storagebatchoperations_v1_generated_StorageBatchOperations_CancelJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "storagebatchoperations_v1_generated_storage_batch_operations_cancel_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient", - "shortName": "StorageBatchOperationsAsyncClient" - }, - "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient.create_job", - "method": { - "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations.CreateJob", - "service": { - "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "shortName": "StorageBatchOperations" - }, - "shortName": "CreateJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.storagebatchoperations_v1.types.CreateJobRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "job", - "type": "google.cloud.storagebatchoperations_v1.types.Job" - }, - { - "name": "job_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_job" - }, - "description": "Sample for CreateJob", - "file": "storagebatchoperations_v1_generated_storage_batch_operations_create_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "storagebatchoperations_v1_generated_StorageBatchOperations_CreateJob_async", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 59, - "start": 53, - "type": 
"REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 60, - "type": "RESPONSE_HANDLING" - } - ], - "title": "storagebatchoperations_v1_generated_storage_batch_operations_create_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient", - "shortName": "StorageBatchOperationsClient" - }, - "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.create_job", - "method": { - "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations.CreateJob", - "service": { - "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "shortName": "StorageBatchOperations" - }, - "shortName": "CreateJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.storagebatchoperations_v1.types.CreateJobRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "job", - "type": "google.cloud.storagebatchoperations_v1.types.Job" - }, - { - "name": "job_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_job" - }, - "description": "Sample for CreateJob", - "file": "storagebatchoperations_v1_generated_storage_batch_operations_create_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "storagebatchoperations_v1_generated_StorageBatchOperations_CreateJob_sync", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 59, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 60, - "type": "RESPONSE_HANDLING" - } - ], - "title": "storagebatchoperations_v1_generated_storage_batch_operations_create_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient", - "shortName": "StorageBatchOperationsAsyncClient" - }, - "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient.delete_job", - "method": { - "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations.DeleteJob", - "service": { - "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "shortName": "StorageBatchOperations" - }, - "shortName": "DeleteJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.storagebatchoperations_v1.types.DeleteJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_job" - }, - "description": "Sample for DeleteJob", - "file": "storagebatchoperations_v1_generated_storage_batch_operations_delete_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "storagebatchoperations_v1_generated_StorageBatchOperations_DeleteJob_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 
40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "storagebatchoperations_v1_generated_storage_batch_operations_delete_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient", - "shortName": "StorageBatchOperationsClient" - }, - "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.delete_job", - "method": { - "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations.DeleteJob", - "service": { - "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "shortName": "StorageBatchOperations" - }, - "shortName": "DeleteJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.storagebatchoperations_v1.types.DeleteJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_job" - }, - "description": "Sample for DeleteJob", - "file": "storagebatchoperations_v1_generated_storage_batch_operations_delete_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "storagebatchoperations_v1_generated_StorageBatchOperations_DeleteJob_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "storagebatchoperations_v1_generated_storage_batch_operations_delete_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient", - "shortName": "StorageBatchOperationsAsyncClient" - }, - "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient.get_job", - "method": { - "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations.GetJob", - "service": { - "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "shortName": "StorageBatchOperations" - }, - "shortName": "GetJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.storagebatchoperations_v1.types.GetJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.storagebatchoperations_v1.types.Job", - "shortName": "get_job" - }, - "description": "Sample for GetJob", - "file": "storagebatchoperations_v1_generated_storage_batch_operations_get_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "storagebatchoperations_v1_generated_StorageBatchOperations_GetJob_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": 
"CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "storagebatchoperations_v1_generated_storage_batch_operations_get_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient", - "shortName": "StorageBatchOperationsClient" - }, - "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.get_job", - "method": { - "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations.GetJob", - "service": { - "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "shortName": "StorageBatchOperations" - }, - "shortName": "GetJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.storagebatchoperations_v1.types.GetJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.storagebatchoperations_v1.types.Job", - "shortName": "get_job" - }, - "description": "Sample for GetJob", - "file": "storagebatchoperations_v1_generated_storage_batch_operations_get_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "storagebatchoperations_v1_generated_StorageBatchOperations_GetJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "storagebatchoperations_v1_generated_storage_batch_operations_get_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient", - "shortName": "StorageBatchOperationsAsyncClient" - }, - "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsAsyncClient.list_jobs", - "method": { - "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations.ListJobs", - "service": { - "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "shortName": "StorageBatchOperations" - }, - "shortName": "ListJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.storagebatchoperations_v1.types.ListJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.storagebatchoperations_v1.services.storage_batch_operations.pagers.ListJobsAsyncPager", - "shortName": "list_jobs" - }, - "description": "Sample for ListJobs", - "file": "storagebatchoperations_v1_generated_storage_batch_operations_list_jobs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "storagebatchoperations_v1_generated_StorageBatchOperations_ListJobs_async", - "segments": [ - { - "end": 52, - 
"start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "storagebatchoperations_v1_generated_storage_batch_operations_list_jobs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient", - "shortName": "StorageBatchOperationsClient" - }, - "fullName": "google.cloud.storagebatchoperations_v1.StorageBatchOperationsClient.list_jobs", - "method": { - "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations.ListJobs", - "service": { - "fullName": "google.cloud.storagebatchoperations.v1.StorageBatchOperations", - "shortName": "StorageBatchOperations" - }, - "shortName": "ListJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.storagebatchoperations_v1.types.ListJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.storagebatchoperations_v1.services.storage_batch_operations.pagers.ListJobsPager", - "shortName": "list_jobs" - }, - "description": "Sample for ListJobs", - "file": "storagebatchoperations_v1_generated_storage_batch_operations_list_jobs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "storagebatchoperations_v1_generated_StorageBatchOperations_ListJobs_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "storagebatchoperations_v1_generated_storage_batch_operations_list_jobs_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_cancel_job_async.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_cancel_job_async.py deleted file mode 100644 index 783ed99d0ee2..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_cancel_job_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# Generated code. DO NOT EDIT! -# -# Snippet for CancelJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-storagebatchoperations - - -# [START storagebatchoperations_v1_generated_StorageBatchOperations_CancelJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import storagebatchoperations_v1 - - -async def sample_cancel_job(): - # Create a client - client = storagebatchoperations_v1.StorageBatchOperationsAsyncClient() - - # Initialize request argument(s) - request = storagebatchoperations_v1.CancelJobRequest( - name="name_value", - ) - - # Make the request - response = await client.cancel_job(request=request) - - # Handle the response - print(response) - -# [END storagebatchoperations_v1_generated_StorageBatchOperations_CancelJob_async] diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_cancel_job_sync.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_cancel_job_sync.py deleted file mode 100644 index 465b54222faa..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_cancel_job_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CancelJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-storagebatchoperations - - -# [START storagebatchoperations_v1_generated_StorageBatchOperations_CancelJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import storagebatchoperations_v1 - - -def sample_cancel_job(): - # Create a client - client = storagebatchoperations_v1.StorageBatchOperationsClient() - - # Initialize request argument(s) - request = storagebatchoperations_v1.CancelJobRequest( - name="name_value", - ) - - # Make the request - response = client.cancel_job(request=request) - - # Handle the response - print(response) - -# [END storagebatchoperations_v1_generated_StorageBatchOperations_CancelJob_sync] diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_create_job_async.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_create_job_async.py deleted file mode 100644 index 75b9e4cabe92..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_create_job_async.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-storagebatchoperations - - -# [START storagebatchoperations_v1_generated_StorageBatchOperations_CreateJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import storagebatchoperations_v1 - - -async def sample_create_job(): - # Create a client - client = storagebatchoperations_v1.StorageBatchOperationsAsyncClient() - - # Initialize request argument(s) - job = storagebatchoperations_v1.Job() - job.bucket_list.buckets.bucket = "bucket_value" - job.put_object_hold.temporary_hold = "UNSET" - job.put_object_hold.event_based_hold = "UNSET" - - request = storagebatchoperations_v1.CreateJobRequest( - parent="parent_value", - job_id="job_id_value", - job=job, - ) - - # Make the request - operation = client.create_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END storagebatchoperations_v1_generated_StorageBatchOperations_CreateJob_async] diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_create_job_sync.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_create_job_sync.py deleted file mode 100644 index 25f1cbfb49f8..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_create_job_sync.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-storagebatchoperations - - -# [START storagebatchoperations_v1_generated_StorageBatchOperations_CreateJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import storagebatchoperations_v1 - - -def sample_create_job(): - # Create a client - client = storagebatchoperations_v1.StorageBatchOperationsClient() - - # Initialize request argument(s) - job = storagebatchoperations_v1.Job() - job.bucket_list.buckets.bucket = "bucket_value" - job.put_object_hold.temporary_hold = "UNSET" - job.put_object_hold.event_based_hold = "UNSET" - - request = storagebatchoperations_v1.CreateJobRequest( - parent="parent_value", - job_id="job_id_value", - job=job, - ) - - # Make the request - operation = client.create_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END storagebatchoperations_v1_generated_StorageBatchOperations_CreateJob_sync] diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_delete_job_async.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_delete_job_async.py deleted file mode 100644 index 1a4115e07be4..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_delete_job_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-storagebatchoperations - - -# [START storagebatchoperations_v1_generated_StorageBatchOperations_DeleteJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import storagebatchoperations_v1 - - -async def sample_delete_job(): - # Create a client - client = storagebatchoperations_v1.StorageBatchOperationsAsyncClient() - - # Initialize request argument(s) - request = storagebatchoperations_v1.DeleteJobRequest( - name="name_value", - ) - - # Make the request - await client.delete_job(request=request) - - -# [END storagebatchoperations_v1_generated_StorageBatchOperations_DeleteJob_async] diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_delete_job_sync.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_delete_job_sync.py deleted file mode 100644 index aa2c978f56ba..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_delete_job_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-storagebatchoperations - - -# [START storagebatchoperations_v1_generated_StorageBatchOperations_DeleteJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import storagebatchoperations_v1 - - -def sample_delete_job(): - # Create a client - client = storagebatchoperations_v1.StorageBatchOperationsClient() - - # Initialize request argument(s) - request = storagebatchoperations_v1.DeleteJobRequest( - name="name_value", - ) - - # Make the request - client.delete_job(request=request) - - -# [END storagebatchoperations_v1_generated_StorageBatchOperations_DeleteJob_sync] diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_get_job_async.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_get_job_async.py deleted file mode 100644 index 49d19bee99ac..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_get_job_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-storagebatchoperations - - -# [START storagebatchoperations_v1_generated_StorageBatchOperations_GetJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import storagebatchoperations_v1 - - -async def sample_get_job(): - # Create a client - client = storagebatchoperations_v1.StorageBatchOperationsAsyncClient() - - # Initialize request argument(s) - request = storagebatchoperations_v1.GetJobRequest( - name="name_value", - ) - - # Make the request - response = await client.get_job(request=request) - - # Handle the response - print(response) - -# [END storagebatchoperations_v1_generated_StorageBatchOperations_GetJob_async] diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_get_job_sync.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_get_job_sync.py deleted file mode 100644 index 82a981aae6a6..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_get_job_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-storagebatchoperations - - -# [START storagebatchoperations_v1_generated_StorageBatchOperations_GetJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import storagebatchoperations_v1 - - -def sample_get_job(): - # Create a client - client = storagebatchoperations_v1.StorageBatchOperationsClient() - - # Initialize request argument(s) - request = storagebatchoperations_v1.GetJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_job(request=request) - - # Handle the response - print(response) - -# [END storagebatchoperations_v1_generated_StorageBatchOperations_GetJob_sync] diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_list_jobs_async.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_list_jobs_async.py deleted file mode 100644 index e8e67403df1e..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_list_jobs_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-storagebatchoperations - - -# [START storagebatchoperations_v1_generated_StorageBatchOperations_ListJobs_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import storagebatchoperations_v1 - - -async def sample_list_jobs(): - # Create a client - client = storagebatchoperations_v1.StorageBatchOperationsAsyncClient() - - # Initialize request argument(s) - request = storagebatchoperations_v1.ListJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END storagebatchoperations_v1_generated_StorageBatchOperations_ListJobs_async] diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_list_jobs_sync.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_list_jobs_sync.py deleted file mode 100644 index 3c5c90f47bef..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/samples/generated_samples/storagebatchoperations_v1_generated_storage_batch_operations_list_jobs_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-storagebatchoperations - - -# [START storagebatchoperations_v1_generated_StorageBatchOperations_ListJobs_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import storagebatchoperations_v1 - - -def sample_list_jobs(): - # Create a client - client = storagebatchoperations_v1.StorageBatchOperationsClient() - - # Initialize request argument(s) - request = storagebatchoperations_v1.ListJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END storagebatchoperations_v1_generated_StorageBatchOperations_ListJobs_sync] diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/scripts/fixup_storagebatchoperations_v1_keywords.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/scripts/fixup_storagebatchoperations_v1_keywords.py deleted file mode 100644 index 598664166809..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/scripts/fixup_storagebatchoperations_v1_keywords.py +++ /dev/null @@ -1,180 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class storagebatchoperationsCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'cancel_job': ('name', 'request_id', ), - 'create_job': ('parent', 'job_id', 'job', 'request_id', ), - 'delete_job': ('name', 'request_id', ), - 'get_job': ('name', ), - 'list_jobs': ('parent', 'filter', 'page_size', 'page_token', 'order_by', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
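# (Editor's note, not generated code: a worked example of the transform this
# method performs, assuming only the METHOD_TO_PARAMS table above. A flattened
# positional call such as
#     client.get_job("projects/p/locations/l/jobs/j")
# is rewritten to the request-object form
#     client.get_job(request={'name': "projects/p/locations/l/jobs/j"})
# while control parameters like retry/timeout/metadata are kept as plain
# keyword arguments rather than folded into the request dict.)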
- return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=storagebatchoperationsCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the storagebatchoperations client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/setup.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/setup.py deleted file mode 100644 index 69c4a0db82f5..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/setup.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-storagebatchoperations' - - -description = "Google Cloud Storagebatchoperations API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/storagebatchoperations/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0", - "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", - "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storagebatchoperations" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.13.txt deleted file mode 100644 index c20a77817caa..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.13.txt +++ /dev/null @@ -1,11 +0,0 @@ -# We use the constraints file for the latest Python version -# (currently this file) to check that the latest -# major versions of dependencies are supported in setup.py. -# List all library dependencies and extras in this file. -# Require the latest major version be installed for each dependency. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", -# Then this file should have google-cloud-foo>=1 -google-api-core>=2 -google-auth>=2 -proto-plus>=1 -protobuf>=6 diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.7.txt deleted file mode 100644 index a77f12bc13e4..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/__init__.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/__init__.py deleted file mode 100644 index 191773d5572d..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/__init__.py deleted file mode 100644 index 191773d5572d..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 191773d5572d..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/gapic/storagebatchoperations_v1/__init__.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/gapic/storagebatchoperations_v1/__init__.py deleted file mode 100644 index 191773d5572d..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/gapic/storagebatchoperations_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/gapic/storagebatchoperations_v1/test_storage_batch_operations.py b/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/gapic/storagebatchoperations_v1/test_storage_batch_operations.py deleted file mode 100644 index 53c7521b6234..000000000000 --- a/owl-bot-staging/google-cloud-storagebatchoperations/v1/tests/unit/gapic/storagebatchoperations_v1/test_storage_batch_operations.py +++ /dev/null @@ -1,6318 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.location import locations_pb2 -from google.cloud.storagebatchoperations_v1.services.storage_batch_operations import StorageBatchOperationsAsyncClient -from google.cloud.storagebatchoperations_v1.services.storage_batch_operations import StorageBatchOperationsClient -from google.cloud.storagebatchoperations_v1.services.storage_batch_operations import pagers -from google.cloud.storagebatchoperations_v1.services.storage_batch_operations import transports -from google.cloud.storagebatchoperations_v1.types import storage_batch_operations -from google.cloud.storagebatchoperations_v1.types import storage_batch_operations_types -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import code_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert StorageBatchOperationsClient._get_default_mtls_endpoint(None) is None - assert StorageBatchOperationsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert StorageBatchOperationsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert StorageBatchOperationsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert StorageBatchOperationsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert StorageBatchOperationsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert StorageBatchOperationsClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert StorageBatchOperationsClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert StorageBatchOperationsClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - StorageBatchOperationsClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert StorageBatchOperationsClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert StorageBatchOperationsClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert StorageBatchOperationsClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - StorageBatchOperationsClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert StorageBatchOperationsClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert StorageBatchOperationsClient._get_client_cert_source(None, False) is None - assert 
StorageBatchOperationsClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert StorageBatchOperationsClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert StorageBatchOperationsClient._get_client_cert_source(None, True) is mock_default_cert_source - assert StorageBatchOperationsClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(StorageBatchOperationsClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(StorageBatchOperationsClient)) -@mock.patch.object(StorageBatchOperationsAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(StorageBatchOperationsAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = StorageBatchOperationsClient._DEFAULT_UNIVERSE - default_endpoint = StorageBatchOperationsClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = StorageBatchOperationsClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert StorageBatchOperationsClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert StorageBatchOperationsClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == StorageBatchOperationsClient.DEFAULT_MTLS_ENDPOINT - assert StorageBatchOperationsClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert StorageBatchOperationsClient._get_api_endpoint(None, None, default_universe, "always") == StorageBatchOperationsClient.DEFAULT_MTLS_ENDPOINT - assert StorageBatchOperationsClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == StorageBatchOperationsClient.DEFAULT_MTLS_ENDPOINT - assert StorageBatchOperationsClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert StorageBatchOperationsClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - StorageBatchOperationsClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert StorageBatchOperationsClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert StorageBatchOperationsClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert StorageBatchOperationsClient._get_universe_domain(None, None) == StorageBatchOperationsClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - StorageBatchOperationsClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
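Editor's note: the assertions above pin down the precedence used to resolve the universe domain. Below is a minimal sketch of that rule, assuming only what the deleted tests assert; the helper name and signature are illustrative, not the client's private API.

DEFAULT_UNIVERSE = "googleapis.com"  # matches _DEFAULT_UNIVERSE in the tests

def resolve_universe_domain(client_universe, env_universe):
    # An explicitly configured empty string is rejected outright.
    if client_universe == "":
        raise ValueError("Universe Domain cannot be an empty string.")
    # Explicit client option wins over the environment.
    if client_universe is not None:
        return client_universe
    # Otherwise fall back to GOOGLE_CLOUD_UNIVERSE_DOMAIN, then the default.
    if env_universe is not None:
        return env_universe
    return DEFAULT_UNIVERSE

# resolve_universe_domain("foo.com", "bar.com")  -> "foo.com"
# resolve_universe_domain(None, "bar.com")       -> "bar.com"
# resolve_universe_domain(None, None)            -> "googleapis.com"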
- -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = StorageBatchOperationsClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = StorageBatchOperationsClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (StorageBatchOperationsClient, "grpc"), - (StorageBatchOperationsAsyncClient, "grpc_asyncio"), - (StorageBatchOperationsClient, "rest"), -]) -def test_storage_batch_operations_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'storagebatchoperations.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://storagebatchoperations.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.StorageBatchOperationsGrpcTransport, "grpc"), - (transports.StorageBatchOperationsGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.StorageBatchOperationsRestTransport, "rest"), -]) -def test_storage_batch_operations_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (StorageBatchOperationsClient, "grpc"), - (StorageBatchOperationsAsyncClient, "grpc_asyncio"), - (StorageBatchOperationsClient, "rest"), -]) -def test_storage_batch_operations_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: 
- factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'storagebatchoperations.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://storagebatchoperations.googleapis.com' - ) - - -def test_storage_batch_operations_client_get_transport_class(): - transport = StorageBatchOperationsClient.get_transport_class() - available_transports = [ - transports.StorageBatchOperationsGrpcTransport, - transports.StorageBatchOperationsRestTransport, - ] - assert transport in available_transports - - transport = StorageBatchOperationsClient.get_transport_class("grpc") - assert transport == transports.StorageBatchOperationsGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (StorageBatchOperationsClient, transports.StorageBatchOperationsGrpcTransport, "grpc"), - (StorageBatchOperationsAsyncClient, transports.StorageBatchOperationsGrpcAsyncIOTransport, "grpc_asyncio"), - (StorageBatchOperationsClient, transports.StorageBatchOperationsRestTransport, "rest"), -]) -@mock.patch.object(StorageBatchOperationsClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(StorageBatchOperationsClient)) -@mock.patch.object(StorageBatchOperationsAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(StorageBatchOperationsAsyncClient)) -def test_storage_batch_operations_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(StorageBatchOperationsClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(StorageBatchOperationsClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". 
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "always".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError) as excinfo:
+            client = client_class(transport=transport_name)
+        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
+        with pytest.raises(ValueError) as excinfo:
+            client = client_class(transport=transport_name)
+        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_endpoint is provided
+    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com"
+        )
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
+    (StorageBatchOperationsClient, transports.StorageBatchOperationsGrpcTransport, "grpc", "true"),
+    (StorageBatchOperationsAsyncClient, transports.StorageBatchOperationsGrpcAsyncIOTransport, "grpc_asyncio", "true"),
+    (StorageBatchOperationsClient, transports.StorageBatchOperationsGrpcTransport, "grpc", "false"),
+    (StorageBatchOperationsAsyncClient, transports.StorageBatchOperationsGrpcAsyncIOTransport, "grpc_asyncio", "false"),
+    (StorageBatchOperationsClient, transports.StorageBatchOperationsRestTransport, "rest", "true"),
+    (StorageBatchOperationsClient, transports.StorageBatchOperationsRestTransport, "rest", "false"),
+])
+@mock.patch.object(StorageBatchOperationsClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(StorageBatchOperationsClient))
+@mock.patch.object(StorageBatchOperationsAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(StorageBatchOperationsAsyncClient))
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_storage_batch_operations_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
+        options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(client_options=options, transport=transport_name)
+
+            if use_client_cert_env == "false":
+                expected_client_cert_source = None
+                expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
+            else:
+                expected_client_cert_source = client_cert_source_callback
+                expected_host = client.DEFAULT_MTLS_ENDPOINT
+
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=expected_host,
+                scopes=None,
+                client_cert_source_for_mtls=expected_client_cert_source,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case ADC client cert is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
+                with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
+                    if use_client_cert_env == "false":
+                        expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
+                        expected_client_cert_source = None
+                    else:
+                        expected_host = client.DEFAULT_MTLS_ENDPOINT
+                        expected_client_cert_source = client_cert_source_callback
+
+                    patched.return_value = None
+                    client = client_class(transport=transport_name)
+                    patched.assert_called_once_with(
+                        credentials=None,
+                        credentials_file=None,
+                        host=expected_host,
+                        scopes=None,
+                        client_cert_source_for_mtls=expected_client_cert_source,
+                        quota_project_id=None,
+                        client_info=transports.base.DEFAULT_CLIENT_INFO,
+                        always_use_jwt_access=True,
+                        api_audience=None,
+                    )
+
+    # Check the case client_cert_source and ADC client cert are not provided.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
+                patched.return_value = None
+                client = client_class(transport=transport_name)
+                patched.assert_called_once_with(
+                    credentials=None,
+                    credentials_file=None,
+                    host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+                    scopes=None,
+                    client_cert_source_for_mtls=None,
+                    quota_project_id=None,
+                    client_info=transports.base.DEFAULT_CLIENT_INFO,
+                    always_use_jwt_access=True,
+                    api_audience=None,
+                )
+
+
+@pytest.mark.parametrize("client_class", [
+    StorageBatchOperationsClient, StorageBatchOperationsAsyncClient
+])
+@mock.patch.object(StorageBatchOperationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(StorageBatchOperationsClient))
+@mock.patch.object(StorageBatchOperationsAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(StorageBatchOperationsAsyncClient))
+def test_storage_batch_operations_client_get_mtls_endpoint_and_cert_source(client_class):
+    mock_client_cert_source = mock.Mock()
+
+    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        mock_api_endpoint = "foo"
+        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
+        assert api_endpoint == mock_api_endpoint
+        assert cert_source == mock_client_cert_source
+
+    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
+        mock_client_cert_source = mock.Mock()
+        mock_api_endpoint = "foo"
+        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
+        assert api_endpoint == mock_api_endpoint
+        assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+        assert api_endpoint == client_class.DEFAULT_ENDPOINT
+        assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
+        assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False):
+            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+            assert api_endpoint == client_class.DEFAULT_ENDPOINT
+            assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
+            with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source):
+                api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+                assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
+                assert cert_source == mock_client_cert_source
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError) as excinfo:
+            client_class.get_mtls_endpoint_and_cert_source()
+
+        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
+        with pytest.raises(ValueError) as excinfo:
+            client_class.get_mtls_endpoint_and_cert_source()
+
+        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+
+@pytest.mark.parametrize("client_class", [
+    StorageBatchOperationsClient, StorageBatchOperationsAsyncClient
+])
+@mock.patch.object(StorageBatchOperationsClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(StorageBatchOperationsClient))
+@mock.patch.object(StorageBatchOperationsAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(StorageBatchOperationsAsyncClient))
+def test_storage_batch_operations_client_client_api_endpoint(client_class):
+    mock_client_cert_source = client_cert_source_callback
+    api_override = "foo.com"
+    default_universe = StorageBatchOperationsClient._DEFAULT_UNIVERSE
+    default_endpoint = StorageBatchOperationsClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
+    mock_universe = "bar.com"
+    mock_endpoint = StorageBatchOperationsClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)
+
+    # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true",
+    # use ClientOptions.api_endpoint as the api endpoint regardless.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"):
+            options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override)
+            client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
+            assert client.api_endpoint == api_override
+
+    # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never",
+    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        client = client_class(credentials=ga_credentials.AnonymousCredentials())
+        assert client.api_endpoint == default_endpoint
+
+    # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always",
+    # use the DEFAULT_MTLS_ENDPOINT as the api endpoint.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        client = client_class(credentials=ga_credentials.AnonymousCredentials())
+        assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
+
+    # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default),
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist,
+    # and ClientOptions.universe_domain="bar.com",
+    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint.
+    options = client_options.ClientOptions()
+    universe_exists = hasattr(options, "universe_domain")
+    if universe_exists:
+        options = client_options.ClientOptions(universe_domain=mock_universe)
+        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
+    else:
+        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
+    assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint)
+    assert client.universe_domain == (mock_universe if universe_exists else default_universe)
+
+    # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never",
+    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
+    options = client_options.ClientOptions()
+    if hasattr(options, "universe_domain"):
+        delattr(options, "universe_domain")
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
+        assert client.api_endpoint == default_endpoint
+
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name", [
+    (StorageBatchOperationsClient, transports.StorageBatchOperationsGrpcTransport, "grpc"),
+    (StorageBatchOperationsAsyncClient, transports.StorageBatchOperationsGrpcAsyncIOTransport, "grpc_asyncio"),
+    (StorageBatchOperationsClient, transports.StorageBatchOperationsRestTransport, "rest"),
+])
+def test_storage_batch_operations_client_client_options_scopes(client_class, transport_class, transport_name):
+    # Check the case scopes are provided.
+    options = client_options.ClientOptions(
+        scopes=["1", "2"],
+    )
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=["1", "2"],
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
+    (StorageBatchOperationsClient, transports.StorageBatchOperationsGrpcTransport, "grpc", grpc_helpers),
+    (StorageBatchOperationsAsyncClient, transports.StorageBatchOperationsGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
+    (StorageBatchOperationsClient, transports.StorageBatchOperationsRestTransport, "rest", None),
+])
+def test_storage_batch_operations_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
+    # Check the case credentials file is provided.
+    options = client_options.ClientOptions(
+        credentials_file="credentials.json"
+    )
+
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file="credentials.json",
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+def test_storage_batch_operations_client_client_options_from_dict():
+    with mock.patch('google.cloud.storagebatchoperations_v1.services.storage_batch_operations.transports.StorageBatchOperationsGrpcTransport.__init__') as grpc_transport:
+        grpc_transport.return_value = None
+        client = StorageBatchOperationsClient(
+            client_options={'api_endpoint': 'squid.clam.whelk'}
+        )
+        grpc_transport.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
+    (StorageBatchOperationsClient, transports.StorageBatchOperationsGrpcTransport, "grpc", grpc_helpers),
+    (StorageBatchOperationsAsyncClient, transports.StorageBatchOperationsGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
+])
+def test_storage_batch_operations_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
+    # Check the case credentials file is provided.
+    options = client_options.ClientOptions(
+        credentials_file="credentials.json"
+    )
+
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file="credentials.json",
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+    # test that the credentials from file are saved and used as the credentials.
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel"
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        file_creds = ga_credentials.AnonymousCredentials()
+        load_creds.return_value = (file_creds, None)
+        adc.return_value = (creds, None)
+        client = client_class(client_options=options, transport=transport_name)
+        create_channel.assert_called_with(
+            "storagebatchoperations.googleapis.com:443",
+            credentials=file_creds,
+            credentials_file=None,
+            quota_project_id=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+            ),
+            scopes=None,
+            default_host="storagebatchoperations.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    storage_batch_operations.ListJobsRequest,
+    dict,
+])
+def test_list_jobs(request_type, transport: str = 'grpc'):
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_jobs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = storage_batch_operations.ListJobsResponse(
+            next_page_token='next_page_token_value',
+            unreachable=['unreachable_value'],
+        )
+        response = client.list_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = storage_batch_operations.ListJobsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListJobsPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.unreachable == ['unreachable_value']
+
+
+def test_list_jobs_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = storage_batch_operations.ListJobsRequest(
+        parent='parent_value',
+        filter='filter_value',
+        page_token='page_token_value',
+        order_by='order_by_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_jobs),
+            '__call__') as call:
+        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client.list_jobs(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == storage_batch_operations.ListJobsRequest(
+            parent='parent_value',
+            filter='filter_value',
+            page_token='page_token_value',
+            order_by='order_by_value',
+        )
+
+def test_list_jobs_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = StorageBatchOperationsClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.list_jobs in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.list_jobs] = mock_rpc
+        request = {}
+        client.list_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.list_jobs(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = StorageBatchOperationsAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.list_jobs in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.list_jobs] = mock_rpc
+
+        request = {}
+        await client.list_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.list_jobs(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_jobs_async(transport: str = 'grpc_asyncio', request_type=storage_batch_operations.ListJobsRequest):
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_jobs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(storage_batch_operations.ListJobsResponse(
+            next_page_token='next_page_token_value',
+            unreachable=['unreachable_value'],
+        ))
+        response = await client.list_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = storage_batch_operations.ListJobsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListJobsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.unreachable == ['unreachable_value']
+
+
+@pytest.mark.asyncio
+async def test_list_jobs_async_from_dict():
+    await test_list_jobs_async(request_type=dict)
+
+def test_list_jobs_field_headers():
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = storage_batch_operations.ListJobsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_jobs),
+            '__call__') as call:
+        call.return_value = storage_batch_operations.ListJobsResponse()
+        client.list_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_jobs_field_headers_async():
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = storage_batch_operations.ListJobsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_jobs),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(storage_batch_operations.ListJobsResponse())
+        await client.list_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_jobs_flattened():
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_jobs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = storage_batch_operations.ListJobsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_jobs(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_jobs_flattened_error():
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_jobs(
+            storage_batch_operations.ListJobsRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_jobs_flattened_async():
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_jobs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = storage_batch_operations.ListJobsResponse()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(storage_batch_operations.ListJobsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_jobs(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_jobs_flattened_error_async():
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_jobs(
+            storage_batch_operations.ListJobsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_jobs_pager(transport_name: str = "grpc"):
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_jobs),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            storage_batch_operations.ListJobsResponse(
+                jobs=[
+                    storage_batch_operations_types.Job(),
+                    storage_batch_operations_types.Job(),
+                    storage_batch_operations_types.Job(),
+                ],
+                next_page_token='abc',
+            ),
+            storage_batch_operations.ListJobsResponse(
+                jobs=[],
+                next_page_token='def',
+            ),
+            storage_batch_operations.ListJobsResponse(
+                jobs=[
+                    storage_batch_operations_types.Job(),
+                ],
+                next_page_token='ghi',
+            ),
+            storage_batch_operations.ListJobsResponse(
+                jobs=[
+                    storage_batch_operations_types.Job(),
+                    storage_batch_operations_types.Job(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_jobs(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, storage_batch_operations_types.Job)
+                   for i in results)
+
+def test_list_jobs_pages(transport_name: str = "grpc"):
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_jobs),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            storage_batch_operations.ListJobsResponse(
+                jobs=[
+                    storage_batch_operations_types.Job(),
+                    storage_batch_operations_types.Job(),
+                    storage_batch_operations_types.Job(),
+                ],
+                next_page_token='abc',
+            ),
+            storage_batch_operations.ListJobsResponse(
+                jobs=[],
+                next_page_token='def',
+            ),
+            storage_batch_operations.ListJobsResponse(
+                jobs=[
+                    storage_batch_operations_types.Job(),
+                ],
+                next_page_token='ghi',
+            ),
+            storage_batch_operations.ListJobsResponse(
+                jobs=[
+                    storage_batch_operations_types.Job(),
+                    storage_batch_operations_types.Job(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_jobs(request={}).pages)
+        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_jobs_async_pager():
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_jobs),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            storage_batch_operations.ListJobsResponse(
+                jobs=[
+                    storage_batch_operations_types.Job(),
+                    storage_batch_operations_types.Job(),
+                    storage_batch_operations_types.Job(),
+                ],
+                next_page_token='abc',
+            ),
+            storage_batch_operations.ListJobsResponse(
+                jobs=[],
+                next_page_token='def',
+            ),
+            storage_batch_operations.ListJobsResponse(
+                jobs=[
+                    storage_batch_operations_types.Job(),
+                ],
+                next_page_token='ghi',
+            ),
+            storage_batch_operations.ListJobsResponse(
+                jobs=[
+                    storage_batch_operations_types.Job(),
+                    storage_batch_operations_types.Job(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_jobs(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, storage_batch_operations_types.Job)
+                   for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_jobs_async_pages():
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_jobs),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            storage_batch_operations.ListJobsResponse(
+                jobs=[
+                    storage_batch_operations_types.Job(),
+                    storage_batch_operations_types.Job(),
+                    storage_batch_operations_types.Job(),
+                ],
+                next_page_token='abc',
+            ),
+            storage_batch_operations.ListJobsResponse(
+                jobs=[],
+                next_page_token='def',
+            ),
+            storage_batch_operations.ListJobsResponse(
+                jobs=[
+                    storage_batch_operations_types.Job(),
+                ],
+                next_page_token='ghi',
+            ),
+            storage_batch_operations.ListJobsResponse(
+                jobs=[
+                    storage_batch_operations_types.Job(),
+                    storage_batch_operations_types.Job(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+        async for page_ in (  # pragma: no branch
+            await client.list_jobs(request={})
+        ).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.parametrize("request_type", [
+    storage_batch_operations.GetJobRequest,
+    dict,
+])
+def test_get_job(request_type, transport: str = 'grpc'):
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = storage_batch_operations_types.Job(
+            name='name_value',
+            description='description_value',
+            state=storage_batch_operations_types.Job.State.RUNNING,
+        )
+        response = client.get_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = storage_batch_operations.GetJobRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, storage_batch_operations_types.Job)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.state == storage_batch_operations_types.Job.State.RUNNING
+
+
+def test_get_job_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = storage_batch_operations.GetJobRequest(
+        name='name_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_job),
+            '__call__') as call:
+        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client.get_job(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == storage_batch_operations.GetJobRequest(
+            name='name_value',
+        )
+
+def test_get_job_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = StorageBatchOperationsClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.get_job in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.get_job] = mock_rpc
+        request = {}
+        client.get_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.get_job(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = StorageBatchOperationsAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.get_job in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.get_job] = mock_rpc
+
+        request = {}
+        await client.get_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.get_job(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_job_async(transport: str = 'grpc_asyncio', request_type=storage_batch_operations.GetJobRequest):
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(storage_batch_operations_types.Job(
+            name='name_value',
+            description='description_value',
+            state=storage_batch_operations_types.Job.State.RUNNING,
+        ))
+        response = await client.get_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = storage_batch_operations.GetJobRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, storage_batch_operations_types.Job)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.state == storage_batch_operations_types.Job.State.RUNNING
+
+
+@pytest.mark.asyncio
+async def test_get_job_async_from_dict():
+    await test_get_job_async(request_type=dict)
+
+def test_get_job_field_headers():
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = storage_batch_operations.GetJobRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_job),
+            '__call__') as call:
+        call.return_value = storage_batch_operations_types.Job()
+        client.get_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_job_field_headers_async():
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = storage_batch_operations.GetJobRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_job),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(storage_batch_operations_types.Job())
+        await client.get_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_get_job_flattened():
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = storage_batch_operations_types.Job()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_job(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_job_flattened_error():
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_job(
+            storage_batch_operations.GetJobRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_job_flattened_async():
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = storage_batch_operations_types.Job()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(storage_batch_operations_types.Job())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_job(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_job_flattened_error_async():
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_job(
+            storage_batch_operations.GetJobRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    storage_batch_operations.CreateJobRequest,
+    dict,
+])
+def test_create_job(request_type, transport: str = 'grpc'):
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.create_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = storage_batch_operations.CreateJobRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_create_job_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = storage_batch_operations.CreateJobRequest(
+        parent='parent_value',
+        job_id='job_id_value',
+        request_id='request_id_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_job),
+            '__call__') as call:
+        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client.create_job(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == storage_batch_operations.CreateJobRequest(
+            parent='parent_value',
+            job_id='job_id_value',
+            request_id='request_id_value',
+        )
+
+def test_create_job_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = StorageBatchOperationsClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.create_job in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.create_job] = mock_rpc
+        request = {}
+        client.create_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        # Operation methods call wrapper_fn to build a cached
+        # client._transport.operations_client instance on first rpc call.
+        # Subsequent calls should use the cached wrapper
+        wrapper_fn.reset_mock()
+
+        client.create_job(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_create_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = StorageBatchOperationsAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.create_job in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.create_job] = mock_rpc
+
+        request = {}
+        await client.create_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        # Operation methods call wrapper_fn to build a cached
+        # client._transport.operations_client instance on first rpc call.
+        # Subsequent calls should use the cached wrapper
+        wrapper_fn.reset_mock()
+
+        await client.create_job(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_create_job_async(transport: str = 'grpc_asyncio', request_type=storage_batch_operations.CreateJobRequest):
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        response = await client.create_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = storage_batch_operations.CreateJobRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+@pytest.mark.asyncio
+async def test_create_job_async_from_dict():
+    await test_create_job_async(request_type=dict)
+
+def test_create_job_field_headers():
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = storage_batch_operations.CreateJobRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_job),
+            '__call__') as call:
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        client.create_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_create_job_field_headers_async():
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = storage_batch_operations.CreateJobRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_job),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
+        await client.create_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_create_job_flattened():
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_job(
+            parent='parent_value',
+            job=storage_batch_operations_types.Job(name='name_value'),
+            job_id='job_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].job
+        mock_val = storage_batch_operations_types.Job(name='name_value')
+        assert arg == mock_val
+        arg = args[0].job_id
+        mock_val = 'job_id_value'
+        assert arg == mock_val
+
+
+def test_create_job_flattened_error():
+    client = StorageBatchOperationsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_job(
+            storage_batch_operations.CreateJobRequest(),
+            parent='parent_value',
+            job=storage_batch_operations_types.Job(name='name_value'),
+            job_id='job_id_value',
+        )
+
+@pytest.mark.asyncio
+async def test_create_job_flattened_async():
+    client = StorageBatchOperationsAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
-@pytest.mark.asyncio
-async def test_create_job_flattened_async():
-    client = StorageBatchOperationsAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_job(
-            parent='parent_value',
-            job=storage_batch_operations_types.Job(name='name_value'),
-            job_id='job_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].job
-        mock_val = storage_batch_operations_types.Job(name='name_value')
-        assert arg == mock_val
-        arg = args[0].job_id
-        mock_val = 'job_id_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_job_flattened_error_async():
-    client = StorageBatchOperationsAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.create_job(
-            storage_batch_operations.CreateJobRequest(),
-            parent='parent_value',
-            job=storage_batch_operations_types.Job(name='name_value'),
-            job_id='job_id_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  storage_batch_operations.DeleteJobRequest,
-  dict,
-])
-def test_delete_job(request_type, transport: str = 'grpc'):
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.delete_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = storage_batch_operations.DeleteJobRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-def test_delete_job_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = storage_batch_operations.DeleteJobRequest(
-        name='name_value',
-        request_id='request_id_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_job),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.delete_job(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == storage_batch_operations.DeleteJobRequest(
-            name='name_value',
-            request_id='request_id_value',
-        )
-
-def test_delete_job_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = StorageBatchOperationsClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.delete_job in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.delete_job] = mock_rpc
-        request = {}
-        client.delete_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.delete_job(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_delete_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = StorageBatchOperationsAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.delete_job in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.delete_job] = mock_rpc
-
-        request = {}
-        await client.delete_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.delete_job(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
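All of the *_use_cached_wrapped_rpc tests above and below enforce the same invariant: the client wraps each RPC exactly once at construction time and reuses the cached wrapper on every call. Reduced to its core, the pattern looks roughly like this illustrative sketch (not the library's actual implementation):

    from unittest import mock

    class FakeTransport:
        """Illustrative stand-in for a GAPIC transport (not library code)."""
        def __init__(self, wrap_method):
            # Every RPC is wrapped exactly once, at construction time
            # (the role _prep_wrapped_messages plays in the real client).
            self._wrapped_methods = {
                self.delete_job: wrap_method(self.delete_job),
            }

        def delete_job(self, request):
            pass  # the raw stub call would go here

    class FakeClient:
        def __init__(self, transport):
            self._transport = transport

        def delete_job(self, request):
            # Look up the cached wrapper on every call instead of re-wrapping.
            rpc = self._transport._wrapped_methods[self._transport.delete_job]
            return rpc(request)

    wrap_method = mock.Mock(side_effect=lambda fn: fn)
    client = FakeClient(FakeTransport(wrap_method))
    client.delete_job({})
    client.delete_job({})
    assert wrap_method.call_count == 1  # wrapped once, reused twice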
-@pytest.mark.asyncio
-async def test_delete_job_async(transport: str = 'grpc_asyncio', request_type=storage_batch_operations.DeleteJobRequest):
-    client = StorageBatchOperationsAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        response = await client.delete_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = storage_batch_operations.DeleteJobRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-@pytest.mark.asyncio
-async def test_delete_job_async_from_dict():
-    await test_delete_job_async(request_type=dict)
-
-def test_delete_job_field_headers():
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = storage_batch_operations.DeleteJobRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_job),
-            '__call__') as call:
-        call.return_value = None
-        client.delete_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_job_field_headers_async():
-    client = StorageBatchOperationsAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = storage_batch_operations.DeleteJobRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_job),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        await client.delete_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_delete_job_flattened():
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.delete_job(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_job_flattened_error():
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_job(
-            storage_batch_operations.DeleteJobRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_job_flattened_async():
-    client = StorageBatchOperationsAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_job(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_job_flattened_error_async():
-    client = StorageBatchOperationsAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_job(
-            storage_batch_operations.DeleteJobRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  storage_batch_operations.CancelJobRequest,
-  dict,
-])
-def test_cancel_job(request_type, transport: str = 'grpc'):
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.cancel_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = storage_batch_operations.CancelJobResponse(
-        )
-        response = client.cancel_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = storage_batch_operations.CancelJobRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, storage_batch_operations.CancelJobResponse)
-
-
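The *_non_empty_request_with_auto_populated_field tests (above for DeleteJob, below for CancelJob) guard the AIP-4235 behavior: when a UUID4-style field such as request_id is left unset, the client fills it in automatically so retries stay idempotent. Conceptually (a sketch, not the library's code path):

    import uuid

    request = storage_batch_operations.CancelJobRequest(name="name_value")
    if not request.request_id:
        request.request_id = str(uuid.uuid4())  # idempotency token, generated once per request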
-def test_cancel_job_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = storage_batch_operations.CancelJobRequest(
-        name='name_value',
-        request_id='request_id_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.cancel_job),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.cancel_job(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == storage_batch_operations.CancelJobRequest(
-            name='name_value',
-            request_id='request_id_value',
-        )
-
-def test_cancel_job_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = StorageBatchOperationsClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.cancel_job in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.cancel_job] = mock_rpc
-        request = {}
-        client.cancel_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.cancel_job(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_cancel_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = StorageBatchOperationsAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.cancel_job in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.cancel_job] = mock_rpc
-
-        request = {}
-        await client.cancel_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.cancel_job(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_cancel_job_async(transport: str = 'grpc_asyncio', request_type=storage_batch_operations.CancelJobRequest):
-    client = StorageBatchOperationsAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.cancel_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(storage_batch_operations.CancelJobResponse(
-        ))
-        response = await client.cancel_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = storage_batch_operations.CancelJobRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, storage_batch_operations.CancelJobResponse)
-
-
-@pytest.mark.asyncio
-async def test_cancel_job_async_from_dict():
-    await test_cancel_job_async(request_type=dict)
-
-def test_cancel_job_field_headers():
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = storage_batch_operations.CancelJobRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.cancel_job),
-            '__call__') as call:
-        call.return_value = storage_batch_operations.CancelJobResponse()
-        client.cancel_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_cancel_job_field_headers_async():
-    client = StorageBatchOperationsAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = storage_batch_operations.CancelJobRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.cancel_job),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(storage_batch_operations.CancelJobResponse())
-        await client.cancel_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_cancel_job_flattened():
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.cancel_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = storage_batch_operations.CancelJobResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.cancel_job(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
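The field-header tests above assert that any request field bound into the HTTP URI is mirrored into x-goog-request-params metadata so the backend can route the call. Derived from the request roughly like this (illustrative; the real implementation also percent-encodes values where needed):

    request = storage_batch_operations.CancelJobRequest(name="name_value")
    metadata = (("x-goog-request-params", f"name={request.name}"),)
    # -> ('x-goog-request-params', 'name=name_value'), matching the assertions above.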
-def test_cancel_job_flattened_error():
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.cancel_job(
-            storage_batch_operations.CancelJobRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_cancel_job_flattened_async():
-    client = StorageBatchOperationsAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.cancel_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = storage_batch_operations.CancelJobResponse()
-
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(storage_batch_operations.CancelJobResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.cancel_job(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_cancel_job_flattened_error_async():
-    client = StorageBatchOperationsAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.cancel_job(
-            storage_batch_operations.CancelJobRequest(),
-            name='name_value',
-        )
-
-
-def test_list_jobs_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = StorageBatchOperationsClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_jobs in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_jobs] = mock_rpc
-
-        request = {}
-        client.list_jobs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_jobs(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_list_jobs_rest_required_fields(request_type=storage_batch_operations.ListJobsRequest):
-    transport_class = transports.StorageBatchOperationsRestTransport
-
-    request_init = {}
-    request_init["parent"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_jobs._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["parent"] = 'parent_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_jobs._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == 'parent_value'
-
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = storage_batch_operations.ListJobsResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = storage_batch_operations.ListJobsResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.list_jobs(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_list_jobs_rest_unset_required_fields():
-    transport = transports.StorageBatchOperationsRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.list_jobs._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_jobs_rest_flattened():
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = storage_batch_operations.ListJobsResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = storage_batch_operations.ListJobsResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.list_jobs(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/jobs" % client.transport._host, args[1])
-
-
-def test_list_jobs_rest_flattened_error(transport: str = 'rest'):
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_jobs(
-            storage_batch_operations.ListJobsRequest(),
-            parent='parent_value',
-        )
-
-
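test_list_jobs_rest_required_fields above mocks out google.api_core.path_template.transcode, the helper that splits a request into URI path parameters, query parameters, and body according to the method's http rule. The rough shape of its output for ListJobs, with illustrative field names and values (this is a sketch of the concept, not the helper's exact return contract):

    request = {"parent": "projects/my-project/locations/global", "page_size": 50}
    transcoded = {
        "method": "get",
        "uri": "/v1/projects/my-project/locations/global/jobs",  # "parent" bound into the path
        "query_params": {"pageSize": 50},  # leftover fields, camelCased
    }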
-def test_list_jobs_rest_pager(transport: str = 'rest'):
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # TODO(kbandes): remove this mock unless there's a good reason for it.
-        #with mock.patch.object(path_template, 'transcode') as transcode:
-        # Set the response as a series of pages
-        response = (
-            storage_batch_operations.ListJobsResponse(
-                jobs=[
-                    storage_batch_operations_types.Job(),
-                    storage_batch_operations_types.Job(),
-                    storage_batch_operations_types.Job(),
-                ],
-                next_page_token='abc',
-            ),
-            storage_batch_operations.ListJobsResponse(
-                jobs=[],
-                next_page_token='def',
-            ),
-            storage_batch_operations.ListJobsResponse(
-                jobs=[
-                    storage_batch_operations_types.Job(),
-                ],
-                next_page_token='ghi',
-            ),
-            storage_batch_operations.ListJobsResponse(
-                jobs=[
-                    storage_batch_operations_types.Job(),
-                    storage_batch_operations_types.Job(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(storage_batch_operations.ListJobsResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-        pager = client.list_jobs(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, storage_batch_operations_types.Job)
-                for i in results)
-
-        pages = list(client.list_jobs(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
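The pager test above feeds four fake pages through two calls and checks both item iteration and page iteration. Typical client-side usage looks like this (a sketch with a placeholder parent):

    # Iterating results; the pager fetches subsequent pages lazily:
    for job in client.list_jobs(parent="projects/my-project/locations/global"):
        print(job.name)

    # Or walk page by page, as the test's `.pages` loop does:
    for page in client.list_jobs(parent="projects/my-project/locations/global").pages:
        print(page.raw_page.next_page_token)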
-def test_get_job_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = StorageBatchOperationsClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.get_job in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.get_job] = mock_rpc
-
-        request = {}
-        client.get_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.get_job(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_get_job_rest_required_fields(request_type=storage_batch_operations.GetJobRequest):
-    transport_class = transports.StorageBatchOperationsRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = storage_batch_operations_types.Job()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = storage_batch_operations_types.Job.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.get_job(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_get_job_rest_unset_required_fields():
-    transport = transports.StorageBatchOperationsRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.get_job._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_get_job_rest_flattened():
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = storage_batch_operations_types.Job()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/jobs/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = storage_batch_operations_types.Job.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.get_job(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/jobs/*}" % client.transport._host, args[1])
-
-
-def test_get_job_rest_flattened_error(transport: str = 'rest'):
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_job(
-            storage_batch_operations.GetJobRequest(),
-            name='name_value',
-        )
-
-
-def test_create_job_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = StorageBatchOperationsClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.create_job in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.create_job] = mock_rpc
-
-        request = {}
-        client.create_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods build a cached wrapper on first rpc call
-        # subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.create_job(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_create_job_rest_required_fields(request_type=storage_batch_operations.CreateJobRequest):
-    transport_class = transports.StorageBatchOperationsRestTransport
-
-    request_init = {}
-    request_init["parent"] = ""
-    request_init["job_id"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-    assert "jobId" not in jsonified_request
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_job._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-    assert "jobId" in jsonified_request
-    assert jsonified_request["jobId"] == request_init["job_id"]
-
-    jsonified_request["parent"] = 'parent_value'
-    jsonified_request["jobId"] = 'job_id_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_job._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("job_id", "request_id", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == 'parent_value'
-    assert "jobId" in jsonified_request
-    assert jsonified_request["jobId"] == 'job_id_value'
-
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = operations_pb2.Operation(name='operations/spam')
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.create_job(request)
-
-            expected_params = [
-                (
-                    "jobId",
-                    "",
-                ),
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_create_job_rest_unset_required_fields():
-    transport = transports.StorageBatchOperationsRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.create_job._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("jobId", "requestId", )) & set(("parent", "jobId", "job", )))
-
-
-def test_create_job_rest_flattened():
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-            job=storage_batch_operations_types.Job(name='name_value'),
-            job_id='job_id_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.create_job(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/jobs" % client.transport._host, args[1])
-
-
-def test_create_job_rest_flattened_error(transport: str = 'rest'):
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_job(
-            storage_batch_operations.CreateJobRequest(),
-            parent='parent_value',
-            job=storage_batch_operations_types.Job(name='name_value'),
-            job_id='job_id_value',
-        )
-
-
-def test_delete_job_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = StorageBatchOperationsClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.delete_job in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.delete_job] = mock_rpc
-
-        request = {}
-        client.delete_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.delete_job(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_delete_job_rest_required_fields(request_type=storage_batch_operations.DeleteJobRequest):
-    transport_class = transports.StorageBatchOperationsRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_job._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_job._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("request_id", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = None
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "delete",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = ''
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.delete_job(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_delete_job_rest_unset_required_fields():
-    transport = transports.StorageBatchOperationsRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.delete_job._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("requestId", )) & set(("name", )))
-
-
-def test_delete_job_rest_flattened():
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = None
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/jobs/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = ''
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.delete_job(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/jobs/*}" % client.transport._host, args[1])
-
-
-def test_delete_job_rest_flattened_error(transport: str = 'rest'):
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_job(
-            storage_batch_operations.DeleteJobRequest(),
-            name='name_value',
-        )
-
-
-def test_cancel_job_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = StorageBatchOperationsClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.cancel_job in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.cancel_job] = mock_rpc
-
-        request = {}
-        client.cancel_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.cancel_job(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_cancel_job_rest_required_fields(request_type=storage_batch_operations.CancelJobRequest):
-    transport_class = transports.StorageBatchOperationsRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_job._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_job._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = storage_batch_operations.CancelJobResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = storage_batch_operations.CancelJobResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.cancel_job(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_cancel_job_rest_unset_required_fields():
-    transport = transports.StorageBatchOperationsRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.cancel_job._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_cancel_job_rest_flattened():
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = storage_batch_operations.CancelJobResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/jobs/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = storage_batch_operations.CancelJobResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.cancel_job(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/jobs/*}:cancel" % client.transport._host, args[1])
-
-
-def test_cancel_job_rest_flattened_error(transport: str = 'rest'):
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.cancel_job(
-            storage_batch_operations.CancelJobRequest(),
-            name='name_value',
-        )
-
-
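The constructor tests that follow pin down the client's configuration rules: a pre-built transport already carries its own credentials, so combining it with explicit credentials, a credentials file, scopes, or an api_key raises ValueError. A short sketch using the same imports as the tests:

    # Valid: a single source of configuration.
    transport = transports.StorageBatchOperationsGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    client = StorageBatchOperationsClient(transport=transport)  # OK

    # Invalid: the transport already carries credentials, so each of these
    # combinations raises ValueError (exactly what the tests below assert):
    # StorageBatchOperationsClient(credentials=..., transport=transport)
    # StorageBatchOperationsClient(client_options={"credentials_file": "..."}, transport=transport)
    # StorageBatchOperationsClient(client_options={"scopes": [...]}, transport=transport)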
-def test_credentials_transport_error():
-    # It is an error to provide credentials and a transport instance.
-    transport = transports.StorageBatchOperationsGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = StorageBatchOperationsClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport=transport,
-        )
-
-    # It is an error to provide a credentials file and a transport instance.
-    transport = transports.StorageBatchOperationsGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = StorageBatchOperationsClient(
-            client_options={"credentials_file": "credentials.json"},
-            transport=transport,
-        )
-
-    # It is an error to provide an api_key and a transport instance.
-    transport = transports.StorageBatchOperationsGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    options = client_options.ClientOptions()
-    options.api_key = "api_key"
-    with pytest.raises(ValueError):
-        client = StorageBatchOperationsClient(
-            client_options=options,
-            transport=transport,
-        )
-
-    # It is an error to provide an api_key and a credential.
-    options = client_options.ClientOptions()
-    options.api_key = "api_key"
-    with pytest.raises(ValueError):
-        client = StorageBatchOperationsClient(
-            client_options=options,
-            credentials=ga_credentials.AnonymousCredentials()
-        )
-
-    # It is an error to provide scopes and a transport instance.
-    transport = transports.StorageBatchOperationsGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = StorageBatchOperationsClient(
-            client_options={"scopes": ["1", "2"]},
-            transport=transport,
-        )
-
-
-def test_transport_instance():
-    # A client may be instantiated with a custom transport instance.
-    transport = transports.StorageBatchOperationsGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    client = StorageBatchOperationsClient(transport=transport)
-    assert client.transport is transport
-
-def test_transport_get_channel():
-    # A client may be instantiated with a custom transport instance.
-    transport = transports.StorageBatchOperationsGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    channel = transport.grpc_channel
-    assert channel
-
-    transport = transports.StorageBatchOperationsGrpcAsyncIOTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    channel = transport.grpc_channel
-    assert channel
-
-@pytest.mark.parametrize("transport_class", [
-    transports.StorageBatchOperationsGrpcTransport,
-    transports.StorageBatchOperationsGrpcAsyncIOTransport,
-    transports.StorageBatchOperationsRestTransport,
-])
-def test_transport_adc(transport_class):
-    # Test default credentials are used if not provided.
-    with mock.patch.object(google.auth, 'default') as adc:
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport_class()
-        adc.assert_called_once()
-
-def test_transport_kind_grpc():
-    transport = StorageBatchOperationsClient.get_transport_class("grpc")(
-        credentials=ga_credentials.AnonymousCredentials()
-    )
-    assert transport.kind == "grpc"
-
-
-def test_initialize_client_w_grpc():
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc"
-    )
-    assert client is not None
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_list_jobs_empty_call_grpc():
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_jobs),
-            '__call__') as call:
-        call.return_value = storage_batch_operations.ListJobsResponse()
-        client.list_jobs(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = storage_batch_operations.ListJobsRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_get_job_empty_call_grpc():
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_job),
-            '__call__') as call:
-        call.return_value = storage_batch_operations_types.Job()
-        client.get_job(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = storage_batch_operations.GetJobRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_create_job_empty_call_grpc():
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_job),
-            '__call__') as call:
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        client.create_job(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = storage_batch_operations.CreateJobRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_delete_job_empty_call_grpc():
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_job),
-            '__call__') as call:
-        call.return_value = None
-        client.delete_job(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = storage_batch_operations.DeleteJobRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_cancel_job_empty_call_grpc():
-    client = StorageBatchOperationsClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.cancel_job),
-            '__call__') as call:
-        call.return_value = storage_batch_operations.CancelJobResponse()
-        client.cancel_job(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = storage_batch_operations.CancelJobRequest()
-
-        assert args[0] == request_msg
-
-
-def test_transport_kind_grpc_asyncio():
-    transport = StorageBatchOperationsAsyncClient.get_transport_class("grpc_asyncio")(
-        credentials=async_anonymous_credentials()
-    )
-    assert transport.kind == "grpc_asyncio"
-
-
-def test_initialize_client_w_grpc_asyncio():
-    client = StorageBatchOperationsAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio"
-    )
-    assert client is not None
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_list_jobs_empty_call_grpc_asyncio():
-    client = StorageBatchOperationsAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_jobs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(storage_batch_operations.ListJobsResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        ))
-        await client.list_jobs(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = storage_batch_operations.ListJobsRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_get_job_empty_call_grpc_asyncio():
-    client = StorageBatchOperationsAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(storage_batch_operations_types.Job(
-            name='name_value',
-            description='description_value',
-            state=storage_batch_operations_types.Job.State.RUNNING,
-        ))
-        await client.get_job(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = storage_batch_operations.GetJobRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_create_job_empty_call_grpc_asyncio():
-    client = StorageBatchOperationsAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        await client.create_job(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = storage_batch_operations.CreateJobRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e.
-# request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_delete_job_empty_call_grpc_asyncio():
- client = StorageBatchOperationsAsyncClient(
- credentials=async_anonymous_credentials(),
- transport="grpc_asyncio",
- )
-
- # Mock the actual call, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_job),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- await client.delete_job(request=None)
-
- # Establish that the underlying stub method was called.
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- request_msg = storage_batch_operations.DeleteJobRequest()
-
- assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_cancel_job_empty_call_grpc_asyncio():
- client = StorageBatchOperationsAsyncClient(
- credentials=async_anonymous_credentials(),
- transport="grpc_asyncio",
- )
-
- # Mock the actual call, and fake the request.
- with mock.patch.object(
- type(client.transport.cancel_job),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(storage_batch_operations.CancelJobResponse(
- ))
- await client.cancel_job(request=None)
-
- # Establish that the underlying stub method was called.
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- request_msg = storage_batch_operations.CancelJobRequest()
-
- assert args[0] == request_msg
-
-
-def test_transport_kind_rest():
- transport = StorageBatchOperationsClient.get_transport_class("rest")(
- credentials=ga_credentials.AnonymousCredentials()
- )
- assert transport.kind == "rest"
-
-
-def test_list_jobs_rest_bad_request(request_type=storage_batch_operations.ListJobsRequest):
- client = StorageBatchOperationsClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'parent': 'projects/sample1/locations/sample2'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- json_return_value = ''
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.list_jobs(request)
-
-
-@pytest.mark.parametrize("request_type", [
- storage_batch_operations.ListJobsRequest,
- dict,
-])
-def test_list_jobs_rest_call_success(request_type):
- client = StorageBatchOperationsClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
-
- # send a request that will satisfy transcoding
- request_init = {'parent': 'projects/sample1/locations/sample2'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
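- # The REST tests fake the wire format by serializing the expected
- # proto-plus message to JSON, mirroring the round trip the transport
- # itself performs. A minimal sketch of that conversion:
- #
- #   >>> pb = storage_batch_operations.ListJobsResponse.pb(
- #   ...     storage_batch_operations.ListJobsResponse())
- #   >>> json_format.MessageToJson(pb)
- #   '{}'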
- return_value = storage_batch_operations.ListJobsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = storage_batch_operations.ListJobsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_jobs(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_jobs_rest_interceptors(null_interceptor): - transport = transports.StorageBatchOperationsRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.StorageBatchOperationsRestInterceptor(), - ) - client = StorageBatchOperationsClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.StorageBatchOperationsRestInterceptor, "post_list_jobs") as post, \ - mock.patch.object(transports.StorageBatchOperationsRestInterceptor, "post_list_jobs_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.StorageBatchOperationsRestInterceptor, "pre_list_jobs") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = storage_batch_operations.ListJobsRequest.pb(storage_batch_operations.ListJobsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = storage_batch_operations.ListJobsResponse.to_json(storage_batch_operations.ListJobsResponse()) - req.return_value.content = return_value - - request = storage_batch_operations.ListJobsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = storage_batch_operations.ListJobsResponse() - post_with_metadata.return_value = storage_batch_operations.ListJobsResponse(), metadata - - client.list_jobs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_job_rest_bad_request(request_type=storage_batch_operations.GetJobRequest): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/jobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
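- # google.api_core maps HTTP status codes onto typed exceptions, which
- # is why the 400 faked below surfaces as core_exceptions.BadRequest with
- # no real network involved; other codes map the same way, e.g. a mocked
- # status_code of 404 would surface as core_exceptions.NotFound.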
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_job(request) - - -@pytest.mark.parametrize("request_type", [ - storage_batch_operations.GetJobRequest, - dict, -]) -def test_get_job_rest_call_success(request_type): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/jobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = storage_batch_operations_types.Job( - name='name_value', - description='description_value', - state=storage_batch_operations_types.Job.State.RUNNING, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = storage_batch_operations_types.Job.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_job(request) - - # Establish that the response is the type that we expect. 
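- # The deserialized response is a proto-plus message, so fields read as
- # plain attributes and enums behave as IntEnum members, e.g.:
- #
- #   >>> storage_batch_operations_types.Job.State.RUNNING.name
- #   'RUNNING'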
- assert isinstance(response, storage_batch_operations_types.Job) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.state == storage_batch_operations_types.Job.State.RUNNING - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_job_rest_interceptors(null_interceptor): - transport = transports.StorageBatchOperationsRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.StorageBatchOperationsRestInterceptor(), - ) - client = StorageBatchOperationsClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.StorageBatchOperationsRestInterceptor, "post_get_job") as post, \ - mock.patch.object(transports.StorageBatchOperationsRestInterceptor, "post_get_job_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.StorageBatchOperationsRestInterceptor, "pre_get_job") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = storage_batch_operations.GetJobRequest.pb(storage_batch_operations.GetJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = storage_batch_operations_types.Job.to_json(storage_batch_operations_types.Job()) - req.return_value.content = return_value - - request = storage_batch_operations.GetJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = storage_batch_operations_types.Job() - post_with_metadata.return_value = storage_batch_operations_types.Job(), metadata - - client.get_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_job_rest_bad_request(request_type=storage_batch_operations.CreateJobRequest): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_job(request) - - -@pytest.mark.parametrize("request_type", [ - storage_batch_operations.CreateJobRequest, - dict, -]) -def test_create_job_rest_call_success(request_type): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["job"] = {'name': 'name_value', 'description': 'description_value', 'bucket_list': {'buckets': [{'bucket': 'bucket_value', 'prefix_list': {'included_object_prefixes': ['included_object_prefixes_value1', 'included_object_prefixes_value2']}, 'manifest': {'manifest_location': 'manifest_location_value'}}]}, 'put_object_hold': {'temporary_hold': 1, 'event_based_hold': 1}, 'delete_object': {'permanent_object_deletion_enabled': True}, 'put_metadata': {'content_disposition': 'content_disposition_value', 'content_encoding': 'content_encoding_value', 'content_language': 'content_language_value', 'content_type': 'content_type_value', 'cache_control': 'cache_control_value', 'custom_time': 'custom_time_value', 'custom_metadata': {}}, 'rewrite_object': {'kms_key': 'kms_key_value'}, 'logging_config': {'log_actions': [6], 'log_action_states': [1]}, 'create_time': {'seconds': 751, 'nanos': 543}, 'schedule_time': {}, 'complete_time': {}, 'counters': {'total_object_count': 1922, 'succeeded_object_count': 2307, 'failed_object_count': 1987}, 'error_summaries': [{'error_code': 1, 'error_count': 1202, 'error_log_entries': [{'object_uri': 'object_uri_value', 'error_details': ['error_details_value1', 'error_details_value2']}]}], 'state': 1} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = storage_batch_operations.CreateJobRequest.meta.fields["job"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
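- # Proto-plus wrappers expose their schema via .meta.fields, while raw
- # protobuf messages expose .DESCRIPTOR.fields; the absence of DESCRIPTOR
- # is what identifies a proto-plus type below. For instance:
- #
- #   >>> 'name' in storage_batch_operations_types.Job.meta.fields
- #   True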
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["job"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["job"][field])): - del request_init["job"][field][i][subfield] - else: - del request_init["job"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_job(request) - - # Establish that the response is the type that we expect. 
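- # create_job is a long-running operation: the JSON payload is a
- # google.longrunning Operation, which the client wraps in an api_core
- # operation.Operation future. A caller would typically block on it,
- # sketched here with an illustrative timeout:
- #
- #   >>> job = response.result(timeout=300)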
- assert isinstance(response, operation.Operation)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_create_job_rest_interceptors(null_interceptor):
- transport = transports.StorageBatchOperationsRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.StorageBatchOperationsRestInterceptor(),
- )
- client = StorageBatchOperationsClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(operation.Operation, "_set_result_from_operation"), \
- mock.patch.object(transports.StorageBatchOperationsRestInterceptor, "post_create_job") as post, \
- mock.patch.object(transports.StorageBatchOperationsRestInterceptor, "post_create_job_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.StorageBatchOperationsRestInterceptor, "pre_create_job") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = storage_batch_operations.CreateJobRequest.pb(storage_batch_operations.CreateJobRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = json_format.MessageToJson(operations_pb2.Operation())
- req.return_value.content = return_value
-
- request = storage_batch_operations.CreateJobRequest()
- metadata =[
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = operations_pb2.Operation()
- post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
- client.create_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
- post_with_metadata.assert_called_once()
-
-
-def test_delete_job_rest_bad_request(request_type=storage_batch_operations.DeleteJobRequest):
- client = StorageBatchOperationsClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'name': 'projects/sample1/locations/sample2/jobs/sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- json_return_value = ''
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.delete_job(request)
-
-
-@pytest.mark.parametrize("request_type", [
- storage_batch_operations.DeleteJobRequest,
- dict,
-])
-def test_delete_job_rest_call_success(request_type):
- client = StorageBatchOperationsClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
-
- # send a request that will satisfy transcoding
- request_init = {'name': 'projects/sample1/locations/sample2/jobs/sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_job(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_job_rest_interceptors(null_interceptor): - transport = transports.StorageBatchOperationsRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.StorageBatchOperationsRestInterceptor(), - ) - client = StorageBatchOperationsClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.StorageBatchOperationsRestInterceptor, "pre_delete_job") as pre: - pre.assert_not_called() - pb_message = storage_batch_operations.DeleteJobRequest.pb(storage_batch_operations.DeleteJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = storage_batch_operations.DeleteJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_cancel_job_rest_bad_request(request_type=storage_batch_operations.CancelJobRequest): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/jobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_job(request) - - -@pytest.mark.parametrize("request_type", [ - storage_batch_operations.CancelJobRequest, - dict, -]) -def test_cancel_job_rest_call_success(request_type): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/jobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = storage_batch_operations.CancelJobResponse( - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = storage_batch_operations.CancelJobResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.cancel_job(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, storage_batch_operations.CancelJobResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_cancel_job_rest_interceptors(null_interceptor): - transport = transports.StorageBatchOperationsRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.StorageBatchOperationsRestInterceptor(), - ) - client = StorageBatchOperationsClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.StorageBatchOperationsRestInterceptor, "post_cancel_job") as post, \ - mock.patch.object(transports.StorageBatchOperationsRestInterceptor, "post_cancel_job_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.StorageBatchOperationsRestInterceptor, "pre_cancel_job") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = storage_batch_operations.CancelJobRequest.pb(storage_batch_operations.CancelJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = storage_batch_operations.CancelJobResponse.to_json(storage_batch_operations.CancelJobResponse()) - req.return_value.content = return_value - - request = storage_batch_operations.CancelJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = storage_batch_operations.CancelJobResponse() - post_with_metadata.return_value = storage_batch_operations.CancelJobResponse(), metadata - - client.cancel_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
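- # Alongside the generated job RPCs, the client exposes the standard
- # locations and operations mixins; a direct call looks roughly like:
- #
- #   >>> client.get_location(locations_pb2.GetLocationRequest(
- #   ...     name='projects/sample1/locations/sample2'))
- #
- # Their error path is faked below exactly as for the job RPCs.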
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_location(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, -]) -def test_get_location_rest(request_type): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_locations(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) -def test_list_locations_rest(request_type): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_operations(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_initialize_client_w_rest(): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_jobs_empty_call_rest(): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - client.list_jobs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = storage_batch_operations.ListJobsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_job_empty_call_rest(): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - client.get_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = storage_batch_operations.GetJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_job_empty_call_rest(): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - client.create_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = storage_batch_operations.CreateJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_job_empty_call_rest(): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - client.delete_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = storage_batch_operations.DeleteJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_cancel_job_empty_call_rest(): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - client.cancel_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = storage_batch_operations.CancelJobRequest() - - assert args[0] == request_msg - - -def test_storage_batch_operations_rest_lro_client(): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - transport = client.transport - - # Ensure that we have an api-core operations client. - assert isinstance( - transport.operations_client, -operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.StorageBatchOperationsGrpcTransport, - ) - -def test_storage_batch_operations_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.StorageBatchOperationsTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_storage_batch_operations_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.storagebatchoperations_v1.services.storage_batch_operations.transports.StorageBatchOperationsTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.StorageBatchOperationsTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
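- # The base transport is a pure interface; each concrete gRPC/REST
- # subclass overrides these methods, so on the bare base class any
- # call must raise, e.g.:
- #
- #   >>> transport.list_jobs(request=object())
- #   Traceback (most recent call last):
- #     ...
- #   NotImplementedError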
- methods = ( - 'list_jobs', - 'get_job', - 'create_job', - 'delete_job', - 'cancel_job', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_storage_batch_operations_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.storagebatchoperations_v1.services.storage_batch_operations.transports.StorageBatchOperationsTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.StorageBatchOperationsTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_storage_batch_operations_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.storagebatchoperations_v1.services.storage_batch_operations.transports.StorageBatchOperationsTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.StorageBatchOperationsTransport() - adc.assert_called_once() - - -def test_storage_batch_operations_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - StorageBatchOperationsClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.StorageBatchOperationsGrpcTransport, - transports.StorageBatchOperationsGrpcAsyncIOTransport, - ], -) -def test_storage_batch_operations_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
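- # Application Default Credentials are resolved from the environment
- # (GOOGLE_APPLICATION_CREDENTIALS, gcloud user credentials, or the
- # metadata server); the transport simply defers to google.auth.default(),
- # roughly:
- #
- #   >>> credentials, project_id = google.auth.default(
- #   ...     scopes=["1", "2"], quota_project_id="octopus")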
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.StorageBatchOperationsGrpcTransport, - transports.StorageBatchOperationsGrpcAsyncIOTransport, - transports.StorageBatchOperationsRestTransport, - ], -) -def test_storage_batch_operations_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.StorageBatchOperationsGrpcTransport, grpc_helpers), - (transports.StorageBatchOperationsGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_storage_batch_operations_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "storagebatchoperations.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="storagebatchoperations.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.StorageBatchOperationsGrpcTransport, transports.StorageBatchOperationsGrpcAsyncIOTransport]) -def test_storage_batch_operations_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
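- # A client certificate source is a zero-argument callback returning a
- # (cert_bytes, key_bytes) tuple of PEM blobs, which the transport feeds
- # to grpc.ssl_channel_credentials. A minimal callback sketch:
- #
- #   >>> def client_cert_source_callback():
- #   ...     return b"cert bytes", b"key bytes"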
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_storage_batch_operations_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.StorageBatchOperationsRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_storage_batch_operations_host_no_port(transport_name): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='storagebatchoperations.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'storagebatchoperations.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://storagebatchoperations.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_storage_batch_operations_host_with_port(transport_name): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='storagebatchoperations.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'storagebatchoperations.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://storagebatchoperations.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_storage_batch_operations_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = StorageBatchOperationsClient( - credentials=creds1, - transport=transport_name, - ) - client2 = StorageBatchOperationsClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.list_jobs._session - session2 = client2.transport.list_jobs._session - assert session1 != session2 - session1 = client1.transport.get_job._session - session2 = client2.transport.get_job._session - assert session1 != session2 - session1 = client1.transport.create_job._session - session2 = client2.transport.create_job._session - assert session1 != session2 - session1 = client1.transport.delete_job._session - session2 = client2.transport.delete_job._session - assert session1 != session2 - session1 = client1.transport.cancel_job._session - session2 = client2.transport.cancel_job._session - assert session1 != session2 -def test_storage_batch_operations_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.StorageBatchOperationsGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_storage_batch_operations_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.StorageBatchOperationsGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.StorageBatchOperationsGrpcTransport, transports.StorageBatchOperationsGrpcAsyncIOTransport]) -def test_storage_batch_operations_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.StorageBatchOperationsGrpcTransport, transports.StorageBatchOperationsGrpcAsyncIOTransport]) -def test_storage_batch_operations_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_storage_batch_operations_grpc_lro_client(): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_storage_batch_operations_grpc_lro_async_client(): - client = StorageBatchOperationsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_crypto_key_path(): - project = "squid" - location = "clam" - key_ring = "whelk" - crypto_key = "octopus" - expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, ) - actual = StorageBatchOperationsClient.crypto_key_path(project, location, key_ring, crypto_key) - assert expected == actual - - -def test_parse_crypto_key_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - "key_ring": "cuttlefish", - "crypto_key": "mussel", - } - path = StorageBatchOperationsClient.crypto_key_path(**expected) - - # Check that the path construction is reversible. 
- actual = StorageBatchOperationsClient.parse_crypto_key_path(path) - assert expected == actual - -def test_job_path(): - project = "winkle" - location = "nautilus" - job = "scallop" - expected = "projects/{project}/locations/{location}/jobs/{job}".format(project=project, location=location, job=job, ) - actual = StorageBatchOperationsClient.job_path(project, location, job) - assert expected == actual - - -def test_parse_job_path(): - expected = { - "project": "abalone", - "location": "squid", - "job": "clam", - } - path = StorageBatchOperationsClient.job_path(**expected) - - # Check that the path construction is reversible. - actual = StorageBatchOperationsClient.parse_job_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "whelk" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = StorageBatchOperationsClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "octopus", - } - path = StorageBatchOperationsClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = StorageBatchOperationsClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "oyster" - expected = "folders/{folder}".format(folder=folder, ) - actual = StorageBatchOperationsClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nudibranch", - } - path = StorageBatchOperationsClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = StorageBatchOperationsClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "cuttlefish" - expected = "organizations/{organization}".format(organization=organization, ) - actual = StorageBatchOperationsClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "mussel", - } - path = StorageBatchOperationsClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = StorageBatchOperationsClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "winkle" - expected = "projects/{project}".format(project=project, ) - actual = StorageBatchOperationsClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nautilus", - } - path = StorageBatchOperationsClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = StorageBatchOperationsClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "scallop" - location = "abalone" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = StorageBatchOperationsClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "squid", - "location": "clam", - } - path = StorageBatchOperationsClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = StorageBatchOperationsClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.StorageBatchOperationsTransport, '_prep_wrapped_messages') as prep: - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.StorageBatchOperationsTransport, '_prep_wrapped_messages') as prep: - transport_class = StorageBatchOperationsClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_delete_operation(transport: str = "grpc"): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = StorageBatchOperationsAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = StorageBatchOperationsAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = StorageBatchOperationsAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = StorageBatchOperationsAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = StorageBatchOperationsAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = StorageBatchOperationsAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = StorageBatchOperationsAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = StorageBatchOperationsAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = StorageBatchOperationsAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = StorageBatchOperationsAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = StorageBatchOperationsAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = StorageBatchOperationsAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_locations(transport: str = "grpc"): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = StorageBatchOperationsAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - -def test_list_locations_field_headers(): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = StorageBatchOperationsAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_locations_from_dict(): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = StorageBatchOperationsAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_location(transport: str = "grpc"): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = StorageBatchOperationsAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - -def test_get_location_field_headers(): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() - - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = StorageBatchOperationsAsyncClient( - credentials=async_anonymous_credentials() - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] - -def test_get_location_from_dict(): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = StorageBatchOperationsAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_transport_close_grpc(): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = StorageBatchOperationsAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = StorageBatchOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (StorageBatchOperationsClient, transports.StorageBatchOperationsGrpcTransport), - (StorageBatchOperationsAsyncClient, transports.StorageBatchOperationsGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/types/storage_batch_operations_types.py b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/types/storage_batch_operations_types.py index af28cc5d3d75..9dc4d4d2fd1a 100644 --- a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/types/storage_batch_operations_types.py +++ b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/types/storage_batch_operations_types.py @@ -53,10 +53,10 @@ class Job(proto.Message): Attributes: name (str): Identifier. The resource name of the Job. job_id is unique - within the project and location, that is either set by the - customer or defined by the service. Format: - projects/{project}/locations/{location}/jobs/{job_id} . 
For
-            example: "projects/123456/locations/us-central1/jobs/job01".
+            within the project and is either set by the customer or
+            defined by the service. Format:
+            projects/{project}/locations/global/jobs/{job_id}. For
+            example: "projects/123456/locations/global/jobs/job01".
         description (str):
             Optional. A description provided by the user
             for the job. Its max length is 1024 bytes when

From 87037d58acf5c40bbb68a9ad49230947f753ea4f Mon Sep 17 00:00:00 2001
From: Victor Chudnovsky
Date: Thu, 24 Apr 2025 10:57:19 -0700
Subject: [PATCH 3/3] Fix indentation for storage_batch_operations_types.py

---
 .../types/storage_batch_operations_types.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/types/storage_batch_operations_types.py b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/types/storage_batch_operations_types.py
index 9dc4d4d2fd1a..4fdc8cff1dc8 100644
--- a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/types/storage_batch_operations_types.py
+++ b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/types/storage_batch_operations_types.py
@@ -449,7 +449,7 @@ class PutMetadata(proto.Message):
         content_type (str):
             Optional. Updates objects Content-Type
             fixed metadata. Unset values will be ignored.
-            Set empty values to clear the metadata. Refer
+            Set empty values to clear the metadata. Refer
             to documentation in
             https://cloud.google.com/storage/docs/metadata#content-type
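
--
A minimal sketch of the job name format described in the updated docstring,
assuming only the generated job_path/parse_job_path helpers exercised in the
tests above; "my-project" and "job01" are placeholder IDs, not values taken
from this patch:

    # Sketch: compose and parse the documented resource name. Per the updated
    # docstring, the location segment of a Job name is always "global".
    from google.cloud.storagebatchoperations_v1 import StorageBatchOperationsClient

    name = StorageBatchOperationsClient.job_path("my-project", "global", "job01")
    assert name == "projects/my-project/locations/global/jobs/job01"

    # parse_job_path() inverts job_path(), as test_parse_job_path verifies.
    assert StorageBatchOperationsClient.parse_job_path(name) == {
        "project": "my-project",
        "location": "global",
        "job": "job01",
    }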